Mirror of https://github.com/openhwgroup/cvw.git (synced 2025-04-19 03:24:50 -04:00)

Merge pull request #1284 from AnonymousVikram/fetch_buffer: fetch buffer updates

Commit f0e6d2bc41
96 changed files with 2158 additions and 1581 deletions
40  .github/workflows/install.yml (vendored)
@@ -37,67 +37,57 @@ jobs:
include:
# Ubuntu Installations
- name: ubuntu-20.04
os: ubuntu-20.04
container: null
image: ubuntu:20.04
imageFamily: debian
regressionFail: true
- name: ubuntu-22.04
os: ubuntu-22.04
container: null
image: ubuntu:22.04
imageFamily: debian
- name: ubuntu-24.04
os: ubuntu-24.04
container: null
image: ubuntu:24.04
imageFamily: debian
# Debian Installations
- name: debian-12
os: ubuntu-latest
image: debian:12
imageFamily: debian
- name: debian-11
os: ubuntu-latest
image: debian:11
imageFamily: debian
# Red Hat Installations
- name: rocky-8
os: ubuntu-latest
image: rockylinux:8
imageFamily: redhat
regressionFail: true
- name: rocky-9
os: ubuntu-latest
image: rockylinux:9
imageFamily: redhat
- name: almalinux-8
os: ubuntu-latest
image: almalinux:8
imageFamily: redhat
regressionFail: true
- name: almalinux-9
os: ubuntu-latest
image: almalinux:9
imageFamily: redhat
# SUSE Installations
- name: opensuse-15.6
os: ubuntu-latest
image: opensuse/leap:15.6
imageFamily: suse
# User level installation
- name: user-install
os: ubuntu-latest
image: null
user: true
# Custom location installation
- name: custom-install
os: ubuntu-latest
image: null
riscv_path: /home/riscv
# Custom location user level installation
- name: custom-user-install
os: ubuntu-latest
image: null
user: true
riscv_path: $HOME/riscv-toolchain

# run on selected version of ubuntu or on ubuntu-latest with docker image
runs-on: ${{ matrix.os }}
runs-on: ubuntu-latest
container:
image: ${{ matrix.image }}
options: --privileged --mount type=bind,source=/,target=/host --pid=host --entrypoint /bin/bash # Allow for connection with host
@@ -127,9 +117,9 @@ jobs:
run: |
df -h
if [ -z ${{ matrix.image }} ]; then
./.github/cli-space-cleanup.sh
./.github/scripts/cli-space-cleanup.sh
else
nsenter -t 1 -m -u -n -i bash -c "$(cat .github/cli-space-cleanup.sh)"
nsenter -t 1 -m -u -n -i bash -c "$(cat .github/scripts/cli-space-cleanup.sh)"
fi
df -h
# Run main tool chain installation script, either as a user or system wide

@@ -159,16 +149,18 @@ jobs:
with:
name: installation-logs-${{ matrix.name }}
path: ${{ env.RISCV }}/logs/
# Only the linux-testvectors are needed, so remove the rest of the buildroot to save space
# Logs have already been uploaded so they can be removed
- name: Clean up installation
run: |
rm -rf $RISCV/buildroot/ || sudo rm -rf $RISCV/buildroot/
rm -rf $RISCV/logs || sudo rm -rf $RISCV/logs
df -h
# Make riscof and zsbl only as that is the only testsuite used by standard regression
- name: make tests
run: |
source setup.sh
make riscof zsbl --jobs $(nproc --ignore 1)
# Only the linux-testvectors are needed, so remove the rest of the buildroot to save space
- name: Remove Buildroot to Save Space
run: |
rm -rf $RISCV/buildroot/output/build || sudo rm -rf $RISCV/buildroot/output/build
df -h
# Run standard regression, skipping distros that are known to be broken with Verilator
- name: Regression
if: ${{ matrix.regressionFail != true }}
36  .github/workflows/lint.yml (vendored, new file)
@@ -0,0 +1,36 @@
name: Lint

on:
push:
paths:
- '**/*.py'
- 'bin/*'
- 'sim/vcs/run_vcs'
- '.ruff.toml'
- '!addins/*'
- '!tests/wally-riscv-arch-test/riscv-test-suite/rv64i_m/Q/*'
- '!tests/fp/quad/fpdatasetgen.py'
pull_request:
paths:
- '**/*.py'
- 'bin/*'
- 'sim/vcs/run_vcs'
- '.ruff.toml'
- '!addins/*'
- '!tests/wally-riscv-arch-test/riscv-test-suite/rv64i_m/Q/*'
- '!tests/fp/quad/fpdatasetgen.py'

jobs:
lint:
name: Python ${{matrix.version}} lint
strategy:
matrix:
version: [39, 312] # Test on oldest and newest versions used in wally-package-install.sh
runs-on: ubuntu-latest

steps:
- uses: actions/checkout@v4
- name: Set Python version
run: sed -i '/^target-version/c\target-version = "py${{matrix.version}}"' .ruff.toml
- name: Run ruff
uses: astral-sh/ruff-action@v3
6  .gitignore (vendored)
@@ -161,6 +161,12 @@ examples/asm/sumtest/sumtest
examples/asm/example/example
examples/asm/trap/trap
examples/asm/etc/pause
examples/C/fmul
examples/exercises/fma16/fma16.sv
examples/exercises/fma16/fma16_testgen
examples/exercises/fma16/sol
examples/exercises/riscvsoc_solutions


# Other
external
31  .ruff.toml (new file)
@@ -0,0 +1,31 @@
# Lint all .py files and extra python scripts without extensions
include = ["*.py", "bin/wsim", "bin/regression-wally", "bin/iterelf", "sim/vcs/run_vcs"]
exclude = ["addins/*", "tests/wally-riscv-arch-test/riscv-test-suite/rv64i_m/Q/*", "tests/fp/quad/fpdatasetgen.py"]

# Target oldest version of Python used (Python 3.9 for Ubuntu 20.04 LTS)
target-version = "py39"

line-length=250

[lint]
select = [
"F", # various basic rules
"E101", # indentation contains mixed spaces and tabs
"E4", # imports
"E7", # various improvements
"E9", # error
"W1", # tabs used instead of spaces
"W292", # no newline at end of file
"UP", # Upgraded version available in newer Python
"EXE", # Executable file shebangs
"Q003", # Avoidable escaped quotes
"Q004", # Unnecessary escape character
"RUF", # Ruff specific rules
]

ignore = [
"E701", "E702", # multiple statements on one line
"E722", # do not use bare 'except'
"E74", # ambiguous name
"RUF005", # iterable unpacking in list
]
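The new lint workflow pins Ruff's target to the matrix Python version by rewriting the `target-version` line of this `.ruff.toml` with sed. The following is only an illustrative Python sketch of the same rewrite; the helper name and default path are assumptions, not part of this change.

```python
# Illustrative equivalent of the workflow's sed step; not part of the PR.
import re
from pathlib import Path

def set_ruff_target(version: str, path: str = ".ruff.toml") -> None:
    """Rewrite the target-version line, e.g. version="312" -> target-version = "py312"."""
    text = Path(path).read_text()
    text = re.sub(r"^target-version.*$", f'target-version = "py{version}"', text, flags=re.M)
    Path(path).write_text(text)
```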
2  LICENSE
@@ -1,4 +1,4 @@
// Copyright (C) 2021-23 Harvey Mudd College & Oklahoma State University
// Copyright (C) 2021-25 Harvey Mudd College & Oklahoma State University
//
// SPDX-License-Identifier: Apache-2.0 WITH SHL-2.1
//
2  Makefile
@@ -8,7 +8,7 @@ SIM = ${WALLY}/sim

.PHONY: all riscof testfloat combined_IF_vectors zsbl benchmarks coremark embench coverage cvw-arch-verif clean

all: riscof testfloat combined_IF_vectors zsbl coverage cvw-arch-verif # benchmarks
all: riscof testfloat combined_IF_vectors zsbl coverage # cvw-arch-verif benchmarks

# riscof builds the riscv-arch-test and wally-riscv-arch-test suites
riscof:
@@ -1,4 +1,4 @@
[](https://github.com/openhwgroup/cvw/actions/workflows/install.yml)

# core-v-wally
@@ -1 +1 @@
Subproject commit 66b675017878032974c537ab7aa81758b9812530
Subproject commit c3700b49662bb9997891c75903e9eb1314c72e58
@@ -65,10 +65,7 @@ with open(resultfile, mode='w', newline='') as csvfile:

# Loop through each architecture and run the make commands
for arch in arch_list:
if(str in arch):
xlen_value='32'
else:
xlen_value='64'
xlen_value = "32" if str in arch else "64"
os.system("make clean")
make_all = f"make all XLEN={xlen_value} ARCH={arch}"
os.system(make_all)
@@ -30,12 +30,8 @@ def tabulate_arch_sweep(directory):
file = case+"_"+arch+".json"
file_path = os.path.join(directory, file)
lines = []
try:
f = open(file_path, "r")
with open(file_path) as f:
lines = f.readlines()
except:
f.close()
#print(file_path+" does not exist")
for line in lines:
#print("File: "+file+" Line: "+line)
#p = re.compile('".*" : .*,')
@@ -43,8 +39,8 @@ def tabulate_arch_sweep(directory):
match = re.search(p, line)
if match:
prog = match.group(1)
result = match.group(2);
d[arch][prog] = result;
result = match.group(2)
d[arch][prog] = result
#print(match.group(1)+" " + match.group(2))
f.close()
for arch in [""] + archs:
@@ -53,7 +49,7 @@ def tabulate_arch_sweep(directory):
for prog in d[archs[0]]:
print(prog, end="\t")
for arch in archs:
entry = d[arch].get(prog, "n/a");
entry = d[arch].get(prog, "n/a")
print (entry, end="\t")
print("")
print("New geo mean", end="\t")
@@ -84,4 +80,4 @@ def run_arch_sweep():

directory = run_arch_sweep()
#directory = "run_20231120_072037-caches"
tabulate_arch_sweep(directory)
tabulate_arch_sweep(directory)
@@ -10,13 +10,13 @@ from plotly.subplots import make_subplots

debug = True

def loadCoremark():
def loadCoremark(coremarkData):
"""loads the coremark data dictionary"""
coremarkPath = "riscv-coremark/work/coremark.sim.log"

keywordlist = ["CoreMark 1.0", "CoreMark Size", "MTIME", "MINSTRET", "Branches Miss Predictions", "BTB Misses"]
for keyword in keywordlist:
bashInst = "cat " + coremarkPath + " | grep \"" + keyword + "\" | cut -d \':\' -f 2 | cut -d \" \" -f 2 | tail -1"
bashInst = "cat " + coremarkPath + ' | grep "' + keyword + "\" | cut -d ':' -f 2 | cut -d \" \" -f 2 | tail -1"
result = subprocess.run(bashInst, stdout=subprocess.PIPE, shell=True)
if (debug): print(result)
coremarkData[keyword] = int(result.stdout)
@@ -25,8 +25,8 @@ def loadCoremark():

def loadEmbench(embenchPath, embenchData):
"""loads the embench data dictionary"""
f = open(embenchPath)
embenchData = json.load(f)
with open(embenchPath) as f:
embenchData = json.load(f)
if (debug): print(embenchData)
return embenchData

@@ -93,7 +93,7 @@ def main():
embenchSpeedOpt_SpeedData = {}
embenchSizeOpt_SizeData = {}
embenchSpeedOpt_SizeData = {}
# coremarkData = loadCoremark()
coremarkData = loadCoremark(coremarkData)
embenchSpeedOpt_SpeedData = loadEmbench("embench/wallySpeedOpt_speed.json", embenchSpeedOpt_SpeedData)
embenchSizeOpt_SpeedData = loadEmbench("embench/wallySizeOpt_speed.json", embenchSizeOpt_SpeedData)
embenchSpeedOpt_SizeData = loadEmbench("embench/wallySpeedOpt_size.json", embenchSpeedOpt_SizeData)
@@ -104,4 +104,4 @@ def main():
if __name__ == '__main__':
sys.exit(main())

# "ls -Art ../addins/embench-iot/logs/*speed* | tail -n 1 " # gets most recent embench speed log
# "ls -Art ../addins/embench-iot/logs/*speed* | tail -n 1 " # gets most recent embench speed log
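loadCoremark() above shells out to a grep/cut/tail pipeline to pull each counter value out of coremark.sim.log. Below is a hedged, pure-Python sketch of the same extraction, shown only for illustration; it assumes counter lines look like "MTIME: 1234567", which may not match the real log exactly.

```python
# Illustrative alternative to the grep | cut | tail pipeline in loadCoremark();
# assumes counter lines look like "MTIME: 1234567".
def load_coremark_counters(log_path, keywords):
    data = {}
    with open(log_path) as f:
        lines = f.readlines()
    for keyword in keywords:
        matches = [ln for ln in lines if keyword in ln]
        if matches:  # keep the last matching line, like `tail -1`
            data[keyword] = int(matches[-1].split(":", 1)[1].split()[0])
    return data
```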
100  bin/CacheSim.py
|
@ -16,19 +16,19 @@
|
|||
##
|
||||
## SPDX-License-Identifier: Apache-2.0 WITH SHL-2.1
|
||||
##
|
||||
## Licensed under the Solderpad Hardware License v 2.1 (the “License”); you may not use this file
|
||||
## except in compliance with the License, or, at your option, the Apache License version 2.0. You
|
||||
## Licensed under the Solderpad Hardware License v 2.1 (the “License”); you may not use this file
|
||||
## except in compliance with the License, or, at your option, the Apache License version 2.0. You
|
||||
## may obtain a copy of the License at
|
||||
##
|
||||
## https:##solderpad.org/licenses/SHL-2.1/
|
||||
##
|
||||
## Unless required by applicable law or agreed to in writing, any work distributed under the
|
||||
## License is distributed on an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
|
||||
## either express or implied. See the License for the specific language governing permissions
|
||||
## Unless required by applicable law or agreed to in writing, any work distributed under the
|
||||
## License is distributed on an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
|
||||
## either express or implied. See the License for the specific language governing permissions
|
||||
## and limitations under the License.
|
||||
################################################################################################
|
||||
|
||||
# how to invoke this simulator:
|
||||
# how to invoke this simulator:
|
||||
# CacheSim.py <number of lines> <number of ways> <length of physical address> <length of tag> -f <log file> (-v)
|
||||
# so the default invocation for rv64gc is 'CacheSim.py 64 4 56 44 -f <log file>'
|
||||
# the log files to run this simulator on can be generated from testbench.sv
|
||||
|
@ -37,25 +37,26 @@
|
|||
# This helps avoid unexpected logger behavior.
|
||||
# With verbose mode off, the simulator only reports mismatches between its and Wally's behavior.
|
||||
# With verbose mode on, the simulator logs each access into the cache.
|
||||
# Add -p or --perf to report the hit/miss ratio.
|
||||
# Add -p or --perf to report the hit/miss ratio.
|
||||
# Add -d or --dist to report the distribution of loads, stores, and atomic ops.
|
||||
# These distributions may not add up to 100; this is because of flushes or invalidations.
|
||||
|
||||
import math
|
||||
import argparse
|
||||
import os
|
||||
import sys
|
||||
|
||||
class CacheLine:
|
||||
def __init__(self):
|
||||
self.tag = 0
|
||||
self.valid = False
|
||||
self.dirty = False
|
||||
|
||||
|
||||
def __str__(self):
|
||||
string = "(V: " + str(self.valid) + ", D: " + str(self.dirty)
|
||||
string += ", Tag: " + str(hex(self.tag)) + ")"
|
||||
string = f"(V: {self.valid}, D: {self.dirty}"
|
||||
string += f", Tag: {hex(self.tag)})"
|
||||
return string
|
||||
|
||||
|
||||
def __repr__(self):
|
||||
return self.__str__()
|
||||
|
||||
|
@ -72,13 +73,13 @@ class Cache:
|
|||
self.ways = []
|
||||
for i in range(numways):
|
||||
self.ways.append([])
|
||||
for j in range(numsets):
|
||||
for _ in range(numsets):
|
||||
self.ways[i].append(CacheLine())
|
||||
|
||||
|
||||
self.pLRU = []
|
||||
for i in range(self.numsets):
|
||||
self.pLRU.append([0]*(self.numways-1))
|
||||
|
||||
|
||||
# flushes the cache by setting all dirty bits to False
|
||||
def flush(self):
|
||||
for way in self.ways:
|
||||
|
@ -92,20 +93,21 @@ class Cache:
|
|||
line = self.ways[waynum][setnum]
|
||||
if line.tag == tag and line.valid:
|
||||
line.dirty = 0
|
||||
if invalidate: line.valid = 0
|
||||
|
||||
if invalidate:
|
||||
line.valid = 0
|
||||
|
||||
# invalidates the cache by setting all valid bits to False
|
||||
def invalidate(self):
|
||||
for way in self.ways:
|
||||
for line in way:
|
||||
line.valid = False
|
||||
|
||||
|
||||
# resets the pLRU to a fresh 2-D array of 0s
|
||||
def clear_pLRU(self):
|
||||
self.pLRU = []
|
||||
for i in range(self.numsets):
|
||||
for _ in range(self.numsets):
|
||||
self.pLRU.append([0]*(self.numways-1))
|
||||
|
||||
|
||||
# splits the given address into tag, set, and offset
|
||||
def splitaddr(self, addr):
|
||||
# no need for offset in the sim, but it's here for debug
|
||||
|
@ -113,7 +115,7 @@ class Cache:
|
|||
setnum = (addr >> self.offsetlen) & int('1'*self.setlen, 2)
|
||||
offset = addr & int('1'*self.offsetlen, 2)
|
||||
return tag, setnum, offset
|
||||
|
||||
|
||||
# performs a cache access with the given address.
|
||||
# returns a character representing the outcome:
|
||||
# H/M/E/D - hit, miss, eviction, or eviction with writeback
|
||||
|
@ -138,7 +140,7 @@ class Cache:
|
|||
line.dirty = write
|
||||
self.update_pLRU(waynum, setnum)
|
||||
return 'M'
|
||||
|
||||
|
||||
# we need to evict. Select a victim and overwrite.
|
||||
victim = self.getvictimway(setnum)
|
||||
line = self.ways[victim][setnum]
|
||||
|
@ -154,14 +156,14 @@ class Cache:
|
|||
def update_pLRU(self, waynum, setnum):
|
||||
if self.numways == 1:
|
||||
return
|
||||
|
||||
|
||||
tree = self.pLRU[setnum]
|
||||
bottomrow = (self.numways - 1)//2
|
||||
index = (waynum // 2) + bottomrow
|
||||
tree[index] = int(not (waynum % 2))
|
||||
tree[index] = int(not waynum % 2)
|
||||
while index > 0:
|
||||
parent = (index-1) // 2
|
||||
tree[parent] = index % 2
|
||||
tree[parent] = index % 2
|
||||
index = parent
|
||||
|
||||
# uses the pseudo-LRU tree to select
|
||||
|
@ -170,7 +172,7 @@ class Cache:
|
|||
def getvictimway(self, setnum):
|
||||
if self.numways == 1:
|
||||
return 0
|
||||
|
||||
|
||||
tree = self.pLRU[setnum]
|
||||
index = 0
|
||||
bottomrow = (self.numways - 1) // 2 #first index on the bottom row of the tree
|
||||
|
@ -180,28 +182,28 @@ class Cache:
|
|||
index = index*2 + 1
|
||||
else: #tree[index] == 1
|
||||
# Go to the right child
|
||||
index = index*2 + 2
|
||||
|
||||
index = index*2 + 2
|
||||
|
||||
victim = (index - bottomrow)*2
|
||||
if tree[index] == 1:
|
||||
victim += 1
|
||||
|
||||
|
||||
return victim
|
||||
|
||||
|
||||
def __str__(self):
|
||||
string = ""
|
||||
for i in range(self.numways):
|
||||
string += "Way " + str(i) + ": "
|
||||
string += f"Way {i}: "
|
||||
for line in self.ways[i]:
|
||||
string += str(line) + ", "
|
||||
string += f"{line}, "
|
||||
string += "\n\n"
|
||||
return string
|
||||
|
||||
def __repr__(self):
|
||||
return self.__str__()
|
||||
|
||||
|
||||
def main():
|
||||
|
||||
def parseArgs():
|
||||
parser = argparse.ArgumentParser(description="Simulates a L1 cache.")
|
||||
parser.add_argument('numlines', type=int, help="The number of lines per way (a power of 2)", metavar="L")
|
||||
parser.add_argument('numways', type=int, help="The number of ways (a power of 2)", metavar='W')
|
||||
|
@ -211,8 +213,9 @@ def main():
|
|||
parser.add_argument('-v', "--verbose", action='store_true', help="verbose/full-trace mode")
|
||||
parser.add_argument('-p', "--perf", action='store_true', help="Report hit/miss ratio")
|
||||
parser.add_argument('-d', "--dist", action='store_true', help="Report distribution of operations")
|
||||
return parser.parse_args()
|
||||
|
||||
args = parser.parse_args()
|
||||
def main(args):
|
||||
cache = Cache(args.numlines, args.numways, args.addrlen, args.taglen)
|
||||
extfile = os.path.expanduser(args.file)
|
||||
mismatches = 0
|
||||
|
@ -227,7 +230,7 @@ def main():
|
|||
atoms = 0
|
||||
totalops = 0
|
||||
|
||||
with open(extfile, "r") as f:
|
||||
with open(extfile) as f:
|
||||
for ln in f:
|
||||
ln = ln.strip()
|
||||
lninfo = ln.split()
|
||||
|
@ -239,11 +242,11 @@ def main():
|
|||
cache.clear_pLRU()
|
||||
if args.verbose:
|
||||
print("New Test")
|
||||
|
||||
|
||||
else:
|
||||
if args.dist:
|
||||
totalops += 1
|
||||
|
||||
|
||||
if lninfo[1] == 'F':
|
||||
cache.flush()
|
||||
if args.verbose:
|
||||
|
@ -257,22 +260,22 @@ def main():
|
|||
IsCBOClean = lninfo[1] != 'C'
|
||||
cache.cbo(addr, IsCBOClean)
|
||||
if args.verbose:
|
||||
print(lninfo[1]);
|
||||
print(lninfo[1])
|
||||
else:
|
||||
addr = int(lninfo[0], 16)
|
||||
iswrite = lninfo[1] == 'W' or lninfo[1] == 'A' or lninfo[1] == 'Z'
|
||||
result = cache.cacheaccess(addr, iswrite)
|
||||
|
||||
|
||||
if args.verbose:
|
||||
tag, setnum, offset = cache.splitaddr(addr)
|
||||
print(hex(addr), hex(tag), hex(setnum), hex(offset), lninfo[2], result)
|
||||
|
||||
|
||||
if args.perf:
|
||||
if result == 'H':
|
||||
hits += 1
|
||||
else:
|
||||
misses += 1
|
||||
|
||||
|
||||
if args.dist:
|
||||
if lninfo[1] == 'R':
|
||||
loads += 1
|
||||
|
@ -280,23 +283,24 @@ def main():
|
|||
stores += 1
|
||||
elif lninfo[1] == 'A':
|
||||
atoms += 1
|
||||
|
||||
if not result == lninfo[2]:
|
||||
print("Result mismatch at address", lninfo[0]+ ". Wally:", lninfo[2]+", Sim:", result)
|
||||
|
||||
if result != lninfo[2]:
|
||||
print(f"Result mismatch at address {lninfo[0]}. Wally: {lninfo[2]}, Sim: {result}")
|
||||
mismatches += 1
|
||||
if args.dist:
|
||||
percent_loads = str(round(100*loads/totalops))
|
||||
percent_stores = str(round(100*stores/totalops))
|
||||
percent_atoms = str(round(100*atoms/totalops))
|
||||
print("This log had", percent_loads+"% loads,", percent_stores+"% stores, and", percent_atoms+"% atomic operations.")
|
||||
|
||||
print(f"This log had {percent_loads}% loads, {percent_stores}% stores, and {percent_atoms}% atomic operations.")
|
||||
|
||||
if args.perf:
|
||||
ratio = round(hits/misses,3)
|
||||
print("There were", hits, "hits and", misses, "misses. The hit/miss ratio was", str(ratio)+".")
|
||||
|
||||
|
||||
if mismatches == 0:
|
||||
print("SUCCESS! There were no mismatches between Wally and the sim.")
|
||||
return mismatches
|
||||
|
||||
if __name__ == '__main__':
|
||||
exit(main())
|
||||
args = parseArgs()
|
||||
sys.exit(main(args))
|
||||
|
|
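CacheSim.py splits each physical address into tag, set, and offset fields (splitaddr). For the default rv64gc invocation documented above (CacheSim.py 64 4 56 44), 64 lines per way give 6 set-index bits, leaving 56 − 44 − 6 = 6 offset bits, i.e. 64-byte cache lines. A minimal sketch of that arithmetic follows; the example address is arbitrary, not taken from a real trace.

```python
# Sketch of the tag/set/offset split for the default rv64gc parameters
# (64 lines, 4 ways, 56-bit physical address, 44-bit tag).
import math

addrlen, taglen, numlines = 56, 44, 64
setlen = int(math.log2(numlines))       # 6 set-index bits
offsetlen = addrlen - taglen - setlen   # 6 offset bits -> 64-byte cache lines

addr = 0x80001234                       # arbitrary example address
tag = addr >> (setlen + offsetlen)
setnum = (addr >> offsetlen) & ((1 << setlen) - 1)
offset = addr & ((1 << offsetlen) - 1)
print(hex(tag), hex(setnum), hex(offset))  # 0x80001 0x8 0x34
```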
74  bin/iterelf
|
@ -1,4 +1,4 @@
|
|||
#!/usr/bin/python3
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# iterelf
|
||||
# David_Harris@hmc.edu and Rose Thompson 7/3/2024
|
||||
|
@ -7,20 +7,21 @@
|
|||
|
||||
import argparse
|
||||
import os
|
||||
import sys
|
||||
import multiprocessing
|
||||
from multiprocessing import Pool, TimeoutError
|
||||
from multiprocessing import Pool, TimeoutError as MPTimeoutError
|
||||
TIMEOUT_DUR = 60 # 1 minute
|
||||
|
||||
class bcolors:
|
||||
HEADER = '\033[95m'
|
||||
OKBLUE = '\033[94m'
|
||||
OKCYAN = '\033[96m'
|
||||
OKGREEN = '\033[92m'
|
||||
WARNING = '\033[93m'
|
||||
FAIL = '\033[91m'
|
||||
ENDC = '\033[0m'
|
||||
BOLD = '\033[1m'
|
||||
UNDERLINE = '\033[4m'
|
||||
HEADER = "\033[95m"
|
||||
OKBLUE = "\033[94m"
|
||||
OKCYAN = "\033[96m"
|
||||
OKGREEN = "\033[92m"
|
||||
WARNING = "\033[93m"
|
||||
FAIL = "\033[91m"
|
||||
ENDC = "\033[0m"
|
||||
BOLD = "\033[1m"
|
||||
UNDERLINE = "\033[4m"
|
||||
|
||||
def search_log_for_mismatches(logfile):
|
||||
"""Search through the given log file for text, returning True if it is found or False if it is not"""
|
||||
|
@ -28,33 +29,32 @@ def search_log_for_mismatches(logfile):
|
|||
os.system(grepwarn)
|
||||
greperr = "grep -H Error: " + logfile
|
||||
os.system(greperr)
|
||||
grepcmd = "grep -a -e 'Mismatches : 0' '%s' > /dev/null" % logfile
|
||||
# print(" search_log_for_text invoking %s" % grepcmd)
|
||||
grepcmd = f"grep -a -e 'Mismatches : 0' '{logfile}' > /dev/null"
|
||||
return os.system(grepcmd) == 0
|
||||
|
||||
def run_test_case(elf):
|
||||
"""Run the given test case, and return 0 if the test suceeds and 1 if it fails"""
|
||||
WALLY = os.environ.get('WALLY')
|
||||
fields = elf.rsplit('/', 3)
|
||||
if (fields[2] == "ref"):
|
||||
WALLY = os.environ.get("WALLY")
|
||||
fields = elf.rsplit("/", 3)
|
||||
if fields[2] == "ref":
|
||||
shortelf = fields[1] + "_" + fields[3]
|
||||
else:
|
||||
shortelf = fields[2] + "_" + fields[3]
|
||||
# shortelf = fields[1] + "_" + fields[2]
|
||||
# shortelf = fields[1] + "_" + fields[2]
|
||||
logfile = WALLY + "/sim/" + args.sim + "/logs/" + shortelf + ".log"
|
||||
cmd = "wsim " + args.config + " " + shortelf + " --elf " + elf + " --sim " + args.sim + " --lockstep > " + logfile # add coveerage flags if necessary
|
||||
# print("cmd = " + cmd)
|
||||
cmd = "wsim " + args.config + " " + shortelf + " --elf " + elf + " --sim " + args.sim + " --lockstep > " + logfile # add coveerage flags if necessary
|
||||
# print("cmd = " + cmd)
|
||||
os.system(cmd)
|
||||
if search_log_for_mismatches(logfile):
|
||||
print(f"{bcolors.OKGREEN}%s: Success{bcolors.ENDC}" % (cmd))
|
||||
print(f"{bcolors.OKGREEN}{cmd}: Success{bcolors.ENDC}")
|
||||
return 0
|
||||
elif("WALLY-cbom-01" in elf):
|
||||
elif "WALLY-cbom-01" in elf:
|
||||
# Remove this when CBO instructions are modeled in ImperasDV
|
||||
print(f"{bcolors.OKCYAN}%s: Expected mismatch because ImperasDV does not yet model cache for CBO instructions {bcolors.ENDC}" % (cmd))
|
||||
print(f"{bcolors.OKCYAN}{cmd}: Expected mismatch because ImperasDV does not yet model cache for CBO instructions {bcolors.ENDC}")
|
||||
return 0
|
||||
else:
|
||||
print(f"{bcolors.FAIL}%s: Failures detected in output{bcolors.ENDC}" % (cmd))
|
||||
print(" Check %s" % logfile)
|
||||
print(f"{bcolors.FAIL}{cmd}: Failures detected in output{bcolors.ENDC}")
|
||||
print(f" Check {logfile}")
|
||||
return 1
|
||||
|
||||
##################################
|
||||
|
@ -74,34 +74,32 @@ args = parser.parse_args()
|
|||
# find all ELF files in directory
|
||||
|
||||
ElfList = []
|
||||
if (os.path.isdir(args.dir)):
|
||||
if os.path.isdir(args.dir):
|
||||
DirectorMode = 1
|
||||
for dirpath, dirnames, filenames in os.walk(os.path.abspath(args.dir)):
|
||||
for file in filenames:
|
||||
if (file.endswith("elf") and not file.endswith(args.exclude)):
|
||||
if file.endswith("elf") and not file.endswith(args.exclude):
|
||||
ElfList.append(os.path.join(dirpath, file))
|
||||
else:
|
||||
print(args.dir + " is not a directory")
|
||||
exit(1)
|
||||
#print(ElfList)
|
||||
sys.exit(1)
|
||||
|
||||
# spawn parallel wsim jobs for each ELF file
|
||||
|
||||
ImperasDVLicenseCount = 8
|
||||
with Pool(processes=min(len(ElfList),multiprocessing.cpu_count(), ImperasDVLicenseCount)) as pool:
|
||||
with Pool(processes=min(len(ElfList), multiprocessing.cpu_count(), ImperasDVLicenseCount)) as pool:
|
||||
num_fail = 0
|
||||
results = {}
|
||||
for elf in ElfList:
|
||||
results[elf] = pool.apply_async(run_test_case,(elf,))
|
||||
for (elf,result) in results.items():
|
||||
results[elf] = pool.apply_async(run_test_case, (elf,))
|
||||
for elf, result in results.items():
|
||||
try:
|
||||
num_fail+=result.get(timeout=TIMEOUT_DUR)
|
||||
except TimeoutError:
|
||||
num_fail+=1
|
||||
print(f"{bcolors.FAIL}%s: Timeout - runtime exceeded %d seconds{bcolors.ENDC}" % (elf, TIMEOUT_DUR))
|
||||
num_fail += result.get(timeout=TIMEOUT_DUR)
|
||||
except MPTimeoutError:
|
||||
num_fail += 1
|
||||
print(f"{bcolors.FAIL}{elf}: Timeout - runtime exceeded {TIMEOUT_DUR} seconds{bcolors.ENDC}")
|
||||
|
||||
if (num_fail == 0):
|
||||
if num_fail == 0:
|
||||
print(f"{bcolors.OKGREEN}SUCCESS! All tests ran without failures{bcolors.ENDC}")
|
||||
else:
|
||||
print(f"{bcolors.FAIL}Completed %d tests with %d failures{bcolors.ENDC}" % (len(ElfList), num_fail))
|
||||
|
||||
print(f"{bcolors.FAIL}Completed {len(ElfList)} tests with {num_fail} failures{bcolors.ENDC}")
|
||||
|
|
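The iterelf changes above keep the script's parallel-dispatch structure: ELF tests are fanned out over a multiprocessing Pool whose size is capped by the ImperasDV license count, and each result is collected with a per-test timeout. A minimal sketch of that pattern, with run_one as a hypothetical stand-in for run_test_case:

```python
# Minimal sketch of iterelf's dispatch pattern: pool size capped by the
# ImperasDV license count, per-test timeout, failure counting.
import multiprocessing
from multiprocessing import Pool, TimeoutError as MPTimeoutError

TIMEOUT_DUR = 60   # seconds per test
LICENSE_COUNT = 8  # ImperasDV license limit

def run_one(elf):
    """Stand-in for run_test_case: return 0 on pass, 1 on fail."""
    return 0

def run_all(elfs):
    num_fail = 0
    with Pool(processes=min(len(elfs), multiprocessing.cpu_count(), LICENSE_COUNT)) as pool:
        results = {elf: pool.apply_async(run_one, (elf,)) for elf in elfs}
        for elf, result in results.items():
            try:
                num_fail += result.get(timeout=TIMEOUT_DUR)
            except MPTimeoutError:
                num_fail += 1  # a hung test counts as a failure
    return num_fail
```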
|
@ -68,7 +68,7 @@ In summary, this Python script facilitates the automation of nightly regression
|
|||
|
||||
import os
|
||||
import shutil
|
||||
from datetime import datetime, timedelta
|
||||
from datetime import datetime
|
||||
import time
|
||||
import re
|
||||
import markdown
|
||||
|
@ -78,9 +78,6 @@ import logging
|
|||
from pathlib import Path
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
class FolderManager:
|
||||
"""A class for managing folders and repository cloning."""
|
||||
|
||||
|
@ -115,9 +112,6 @@ class FolderManager:
|
|||
"""
|
||||
|
||||
for folder in folders:
|
||||
folder_path = os.path.join(self.base_parent_dir, folder)
|
||||
# if not os.path.exists(folder_path):
|
||||
# os.makedirs(folder_path)
|
||||
if not os.path.exists(folder):
|
||||
os.makedirs(folder)
|
||||
|
||||
|
@ -171,7 +165,6 @@ class FolderManager:
|
|||
Returns:
|
||||
None
|
||||
"""
|
||||
todays_date = datetime.now().strftime("%Y-%m-%d")
|
||||
cvw = folder.joinpath("cvw")
|
||||
tmp_folder = os.path.join(cvw, "tmp") # temporary files will be stored in here
|
||||
if not cvw.exists():
|
||||
|
@ -287,7 +280,7 @@ class TestRunner:
|
|||
|
||||
if target:
|
||||
output_file = self.log_dir.joinpath(f"make-{target}-output.log")
|
||||
else: output_file = self.log_dir.joinpath(f"make-output.log")
|
||||
else: output_file = self.log_dir.joinpath("make-output.log")
|
||||
|
||||
# Source setup script and execute make with target and cores/2
|
||||
if target:
|
||||
|
@ -398,7 +391,7 @@ class TestRunner:
|
|||
# Implement cleaning and formatting logic here
|
||||
|
||||
# Open up the file with only read permissions
|
||||
with open(input_file, 'r') as input_file:
|
||||
with open(input_file) as input_file:
|
||||
uncleaned_output = input_file.read()
|
||||
|
||||
# use something like this function to detect pass and fail
|
||||
|
@ -461,7 +454,6 @@ class TestRunner:
|
|||
None
|
||||
"""
|
||||
# Implement markdown rewriting logic here
|
||||
timestamp = datetime.now().strftime("%Y-%m-%d")
|
||||
|
||||
# output_directory = self.base_parent_dir.joinpath("results")
|
||||
os.chdir(self.results_dir)
|
||||
|
@ -470,7 +462,7 @@ class TestRunner:
|
|||
|
||||
|
||||
with open(output_file, 'w') as md_file:
|
||||
|
||||
|
||||
# Title
|
||||
md_file.write(f"\n\n# Regression Test Results - {self.todays_date}\n\n")
|
||||
#md_file.write(f"\n\n<div class=\"regression\">\n# Regression Test Results - {timestamp}\n</div>\n\n")
|
||||
|
@ -481,15 +473,15 @@ class TestRunner:
|
|||
if failed_configs:
|
||||
md_file.write("## Failed Configurations\n\n")
|
||||
for config, log_file in failed_configs:
|
||||
md_file.write(f"- <span class=\"failure\" style=\"color: red;\">{config}</span> ({log_file})\n")
|
||||
md_file.write(f'- <span class="failure" style="color: red;">{config}</span> ({log_file})\n')
|
||||
md_file.write("\n")
|
||||
else:
|
||||
md_file.write("## Failed Configurations\n")
|
||||
md_file.write(f"No Failures\n")
|
||||
md_file.write("No Failures\n")
|
||||
|
||||
md_file.write("\n## Passed Configurations\n")
|
||||
for config in passed_configs:
|
||||
md_file.write(f"- <span class=\"success\" style=\"color: green;\">{config}</span>\n")
|
||||
md_file.write(f'- <span class="success" style="color: green;">{config}</span>\n')
|
||||
|
||||
self.logger.info("writing test outputs to markdown")
|
||||
|
||||
|
@ -534,7 +526,7 @@ class TestRunner:
|
|||
md_file.write("\n")
|
||||
except subprocess.CalledProcessError as e:
|
||||
# Handle if the command fails
|
||||
md_file.write(f"Failed to identify host and Operating System information: {str(e)}")
|
||||
md_file.write(f"Failed to identify host and Operating System information: {e!s}")
|
||||
|
||||
# Which tests did we run
|
||||
md_file.write(f"\n**Tests made:** `make {test_type}`\n")
|
||||
|
@ -545,18 +537,18 @@ class TestRunner:
|
|||
md_file.write(f"**Total Failures: {total_number_failures}**\n")
|
||||
|
||||
# Failed Tests
|
||||
md_file.write(f"\n\n## Failed Tests")
|
||||
md_file.write("\n\n## Failed Tests")
|
||||
md_file.write(f"\n**Total failed tests: {total_number_failures}**")
|
||||
for (test_item, item) in zip(test_list, failed_tests):
|
||||
md_file.write(f"\n\n### {test_item[1]} test")
|
||||
md_file.write(f"\n**Command used:** {test_item[0]} {test_item[1]} {' '.join(test_item[2])}\n\n")
|
||||
md_file.write(f"**Failed Tests:**\n")
|
||||
md_file.write("**Failed Tests:**\n")
|
||||
|
||||
|
||||
|
||||
if len(item) == 0:
|
||||
md_file.write("\n")
|
||||
md_file.write(f"* <span class=\"no-failure\" style=\"color: green;\">No failures</span>\n")
|
||||
md_file.write('* <span class="no-failure" style="color: green;">No failures</span>\n')
|
||||
md_file.write("\n")
|
||||
else:
|
||||
for failed_test in item:
|
||||
|
@ -564,29 +556,29 @@ class TestRunner:
|
|||
log_file = failed_test[1]
|
||||
|
||||
md_file.write("\n")
|
||||
md_file.write(f"* <span class=\"failure\" style=\"color: red;\">{config}</span> ({log_file})\n")
|
||||
md_file.write(f'* <span class="failure" style="color: red;">{config}</span> ({log_file})\n')
|
||||
md_file.write("\n")
|
||||
# Successful Tests
|
||||
|
||||
md_file.write(f"\n\n## Successful Tests")
|
||||
md_file.write("\n\n## Successful Tests")
|
||||
md_file.write(f"\n**Total successful tests: {total_number_success}**")
|
||||
for (test_item, item) in zip(test_list, passed_tests):
|
||||
md_file.write(f"\n\n### {test_item[1]} test")
|
||||
md_file.write(f"\n**Command used:** {test_item[0]} {test_item[1]} {' '.join(test_item[2])}\n\n")
|
||||
md_file.write(f"\n**Successful Tests:**\n")
|
||||
md_file.write("\n**Successful Tests:**\n")
|
||||
|
||||
|
||||
|
||||
if len(item) == 0:
|
||||
md_file.write("\n")
|
||||
md_file.write(f"* <span class=\"no-successes\" style=\"color: red;\">No successes</span>\n")
|
||||
md_file.write('* <span class="no-successes" style="color: red;">No successes</span>\n')
|
||||
md_file.write("\n")
|
||||
else:
|
||||
for passed_tests in item:
|
||||
config = passed_tests
|
||||
|
||||
md_file.write("\n")
|
||||
md_file.write(f"* <span class=\"success\" style=\"color: green;\">{config}</span>\n")
|
||||
md_file.write(f'* <span class="success" style="color: green;">{config}</span>\n')
|
||||
md_file.write("\n")
|
||||
|
||||
self.logger.info("Combining markdown files")
|
||||
|
@ -606,7 +598,7 @@ class TestRunner:
|
|||
# Implement markdown to HTML conversion logic here
|
||||
os.chdir(self.results_dir)
|
||||
|
||||
with open(markdown_file, 'r') as md_file:
|
||||
with open(markdown_file) as md_file:
|
||||
md_content = md_file.read()
|
||||
html_content = markdown.markdown(md_content)
|
||||
|
||||
|
@ -614,7 +606,7 @@ class TestRunner:
|
|||
html_file.write(html_content)
|
||||
|
||||
self.logger.info("Converting markdown file to html file.")
|
||||
|
||||
|
||||
def send_email(self, receiver_emails=None, subject="Nightly Regression Test"):
|
||||
"""
|
||||
Send email with HTML content.
|
||||
|
@ -640,7 +632,7 @@ class TestRunner:
|
|||
os.chdir(self.results_dir)
|
||||
html_file = "results.html"
|
||||
|
||||
with open(html_file, 'r') as html_file:
|
||||
with open(html_file) as html_file:
|
||||
body = html_file.read()
|
||||
|
||||
try:
|
||||
|
@ -688,13 +680,10 @@ def main():
|
|||
|
||||
# file paths for where the results and repos will be saved: repos and results can be changed to whatever
|
||||
today = datetime.now().strftime("%Y-%m-%d")
|
||||
yesterday_dt = datetime.now() - timedelta(days=1)
|
||||
yesterday = yesterday_dt.strftime("%Y-%m-%d")
|
||||
cvw_path = Path.home().joinpath(args.path, today)
|
||||
results_path = Path.home().joinpath(args.path, today, "results")
|
||||
log_path = Path.home().joinpath(args.path, today, "logs")
|
||||
log_file_path = log_path.joinpath("nightly_build.log")
|
||||
previous_cvw_path = Path.home().joinpath(args.path,f"{yesterday}/cvw")
|
||||
# creates the object
|
||||
folder_manager = FolderManager(basedir=args.path)
|
||||
|
||||
|
@ -765,12 +754,6 @@ def main():
|
|||
|
||||
if args.target != "no":
|
||||
test_runner.execute_makefile(target = args.target, makefile_path=test_runner.cvw)
|
||||
# TODO: remove vestigial code if no longer wanted
|
||||
# if args.target == "all":
|
||||
# # Compile Linux for local testing
|
||||
# test_runner.set_env_var("RISCV",str(test_runner.cvw))
|
||||
# linux_path = test_runner.cvw / "linux"
|
||||
# test_runner.execute_makefile(target = "all", makefile_path=linux_path)
|
||||
|
||||
#############################################
|
||||
# RUN TESTS #
|
||||
|
@ -817,7 +800,7 @@ def main():
|
|||
logger.info(f"The total failures for all tests ran are: {total_number_failures}")
|
||||
|
||||
# Copy actual test logs from sim/questa, sim/verilator, sim/vcs
|
||||
if not args.tests == "test_lint":
|
||||
if args.tests != 'test_lint':
|
||||
test_runner.copy_sim_logs([test_runner.cvw / "sim/questa/logs", test_runner.cvw / "sim/verilator/logs", test_runner.cvw / "sim/vcs/logs"])
|
||||
|
||||
#############################################
|
||||
|
|
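One step of the nightly script shown above converts the combined markdown report into HTML with the `markdown` package before it is emailed. A minimal sketch of that conversion; the file names here are illustrative only.

```python
# Sketch of the markdown-to-HTML step; file names are illustrative.
import markdown

with open("results.md") as md_file:
    md_content = md_file.read()
html_content = markdown.markdown(md_content)
with open("results.html", "w") as html_file:
    html_file.write(html_content)
```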
|
@ -46,14 +46,14 @@ def ParseBranchListFile(path):
|
|||
is formatted in row columns. Each row is a trace with the file, branch predictor type, and the parameters.
|
||||
parameters can be any number and depend on the predictor type. Returns a list of lists.'''
|
||||
lst = []
|
||||
BranchList = open(path, 'r')
|
||||
for line in BranchList:
|
||||
tokens = line.split()
|
||||
predictorLog = os.path.dirname(path) + '/' + tokens[0]
|
||||
predictorType = tokens[1]
|
||||
predictorParams = tokens[2::]
|
||||
lst.append([predictorLog, predictorType, predictorParams])
|
||||
#print(predictorLog, predictorType, predictorParams)
|
||||
with open(path) as BranchList:
|
||||
for line in BranchList:
|
||||
tokens = line.split()
|
||||
predictorLog = os.path.dirname(path) + '/' + tokens[0]
|
||||
predictorType = tokens[1]
|
||||
predictorParams = tokens[2::]
|
||||
lst.append([predictorLog, predictorType, predictorParams])
|
||||
#print(predictorLog, predictorType, predictorParams)
|
||||
return lst
|
||||
|
||||
def ProcessFile(fileName):
|
||||
|
@ -62,22 +62,22 @@ def ProcessFile(fileName):
|
|||
# 1 find lines with Read memfile and extract test name
|
||||
# 2 parse counters into a list of (name, value) tuples (dictionary maybe?)
|
||||
benchmarks = []
|
||||
transcript = open(fileName, 'r')
|
||||
HPMClist = { }
|
||||
testName = ''
|
||||
for line in transcript.readlines():
|
||||
lineToken = line.split()
|
||||
if(len(lineToken) > 3 and lineToken[1] == 'Read' and lineToken[2] == 'memfile'):
|
||||
opt = lineToken[3].split('/')[-4]
|
||||
testName = lineToken[3].split('/')[-1].split('.')[0]
|
||||
HPMClist = { }
|
||||
elif(len(lineToken) > 4 and lineToken[1][0:3] == 'Cnt'):
|
||||
countToken = line.split('=')[1].split()
|
||||
value = int(countToken[0]) if countToken[0] != 'x' else 0
|
||||
name = ' '.join(countToken[1:])
|
||||
HPMClist[name] = value
|
||||
elif ('is done' in line):
|
||||
benchmarks.append((testName, opt, HPMClist))
|
||||
with open(fileName) as transcript:
|
||||
for line in transcript.readlines():
|
||||
lineToken = line.split()
|
||||
if(len(lineToken) > 3 and lineToken[1] == 'Read' and lineToken[2] == 'memfile'):
|
||||
opt = lineToken[3].split('/')[-4]
|
||||
testName = lineToken[3].split('/')[-1].split('.')[0]
|
||||
HPMClist = { }
|
||||
elif(len(lineToken) > 4 and lineToken[1][0:3] == 'Cnt'):
|
||||
countToken = line.split('=')[1].split()
|
||||
value = int(countToken[0]) if countToken[0] != 'x' else 0
|
||||
name = ' '.join(countToken[1:])
|
||||
HPMClist[name] = value
|
||||
elif ('is done' in line):
|
||||
benchmarks.append((testName, opt, HPMClist))
|
||||
return benchmarks
|
||||
|
||||
|
||||
|
@ -227,13 +227,13 @@ def ReportAsTable(benchmarkDict):
|
|||
|
||||
sys.stdout.write('benchmark\t\t')
|
||||
for name in FirstLine:
|
||||
if(len(name) < 8): sys.stdout.write('%s\t\t' % name)
|
||||
else: sys.stdout.write('%s\t' % name)
|
||||
if(len(name) < 8): sys.stdout.write(f'{name}\t\t')
|
||||
else: sys.stdout.write(f'{name}\t')
|
||||
sys.stdout.write('\n')
|
||||
sys.stdout.write('size\t\t\t')
|
||||
for size in SecondLine:
|
||||
if(len(str(size)) < 8): sys.stdout.write('%d\t\t' % size)
|
||||
else: sys.stdout.write('%d\t' % size)
|
||||
if(len(str(size)) < 8): sys.stdout.write(f'{size}\t\t')
|
||||
else: sys.stdout.write(f'{size}\t')
|
||||
sys.stdout.write('\n')
|
||||
|
||||
if(args.summary):
|
||||
|
@ -245,9 +245,9 @@ def ReportAsTable(benchmarkDict):
|
|||
if(not args.summary):
|
||||
for benchmark in benchmarkDict:
|
||||
length = len(benchmark)
|
||||
if(length < 8): sys.stdout.write('%s\t\t\t' % benchmark)
|
||||
elif(length < 16): sys.stdout.write('%s\t\t' % benchmark)
|
||||
else: sys.stdout.write('%s\t' % benchmark)
|
||||
if(length < 8): sys.stdout.write(f'{benchmark}\t\t\t')
|
||||
elif(length < 16): sys.stdout.write(f'{benchmark}\t\t')
|
||||
else: sys.stdout.write(f'{benchmark}\t')
|
||||
for (name, typ, entries, size, val) in benchmarkDict[benchmark]:
|
||||
sys.stdout.write('%0.2f\t\t' % (val if not args.invert else 100 -val))
|
||||
sys.stdout.write('\n')
|
||||
|
@ -256,14 +256,14 @@ def ReportAsText(benchmarkDict):
|
|||
if(args.summary):
|
||||
mean = benchmarkDict['Mean']
|
||||
print('Mean')
|
||||
for (name, typ, entries. size, val) in mean:
|
||||
sys.stdout.write('%s %s %0.2f\n' % (name, entries if not args.size else size, val if not args.invert else 100 - val))
|
||||
for (name, typ, entries, size, val) in mean:
|
||||
sys.stdout.write(f'{name} {entries if not args.size else size} {val if not args.invert else 100 - val:0.2f}\n')
|
||||
|
||||
if(not args.summary):
|
||||
for benchmark in benchmarkDict:
|
||||
print(benchmark)
|
||||
for (name, type, entries, size, val) in benchmarkDict[benchmark]:
|
||||
sys.stdout.write('%s %s %0.2f\n' % (name, entries if not args.size else size, val if not args.invert else 100 - val))
|
||||
sys.stdout.write(f'{name} {entries if not args.size else size} {val if not args.invert else 100 - val:0.2f}\n')
|
||||
|
||||
def Inversion(lst):
|
||||
return [x if not args.invert else 100 - x for x in lst]
|
||||
|
@ -275,7 +275,7 @@ def BarGraph(seriesDict, xlabelList, BenchPerRow, FileName, IncludeLegend):
|
|||
# the space between groups is 1
|
||||
EffectiveNumInGroup = NumberInGroup + 2
|
||||
barWidth = 1 / EffectiveNumInGroup
|
||||
fig = plt.subplots(figsize = (EffectiveNumInGroup*BenchPerRow/8, 4))
|
||||
_ = plt.subplots(figsize = (EffectiveNumInGroup*BenchPerRow/8, 4))
|
||||
colors = ['blue', 'blue', 'blue', 'blue', 'blue', 'blue', 'black', 'black', 'black', 'black', 'black', 'black']
|
||||
for name in seriesDict:
|
||||
values = seriesDict[name]
|
||||
|
@ -322,14 +322,13 @@ def ReportAsGraph(benchmarkDict, bar, FileName):
|
|||
if(args.summary):
|
||||
markers = ['x', '.', '+', '*', '^', 'o', ',', 's']
|
||||
colors = ['blue', 'black', 'gray', 'dodgerblue', 'lightsteelblue', 'turquoise', 'black', 'blue']
|
||||
temp = benchmarkDict['Mean']
|
||||
|
||||
# the benchmarkDict['Mean'] contains sequencies of results for multiple
|
||||
# branch predictors with various parameterizations
|
||||
# group the parameterizations by the common typ.
|
||||
sequencies = {}
|
||||
for (name, typ, entries, size, value) in benchmarkDict['Mean']:
|
||||
if not typ in sequencies:
|
||||
if typ not in sequencies:
|
||||
sequencies[typ] = [(entries if not args.size else int(size/8), value)]
|
||||
else:
|
||||
sequencies[typ].append((entries if not args.size else int(size/8) ,value))
|
||||
|
@ -354,7 +353,7 @@ def ReportAsGraph(benchmarkDict, bar, FileName):
|
|||
axes.set_xticks(xdata)
|
||||
axes.set_xticklabels(xdata)
|
||||
axes.grid(color='b', alpha=0.5, linestyle='dashed', linewidth=0.5)
|
||||
if(FileName == None): plt.show()
|
||||
if FileName is None: plt.show()
|
||||
else: plt.savefig(FileName)
|
||||
|
||||
# if(not args.summary):
|
||||
|
@ -378,10 +377,10 @@ def ReportAsGraph(benchmarkDict, bar, FileName):
|
|||
|
||||
if(not args.summary):
|
||||
NumBenchmarks = len(benchmarkDict)
|
||||
NumBenchmarksSqrt = math.sqrt(NumBenchmarks)
|
||||
isSquare = math.isclose(NumBenchmarksSqrt, round(NumBenchmarksSqrt))
|
||||
numCol = math.floor(NumBenchmarksSqrt)
|
||||
numRow = numCol + (0 if isSquare else 1)
|
||||
# NumBenchmarksSqrt = math.sqrt(NumBenchmarks)
|
||||
# isSquare = math.isclose(NumBenchmarksSqrt, round(NumBenchmarksSqrt))
|
||||
# numCol = math.floor(NumBenchmarksSqrt)
|
||||
# numRow = numCol + (0 if isSquare else 1)
|
||||
index = 1
|
||||
BenchPerRow = 5
|
||||
|
||||
|
@ -414,7 +413,7 @@ def ReportAsGraph(benchmarkDict, bar, FileName):
|
|||
# on each piece.
|
||||
for row in range(0, math.ceil(NumBenchmarks / BenchPerRow)):
|
||||
(xlabelListTrunk, seriesDictTrunk) = SelectPartition(xlabelListBig, seriesDictBig, row, BenchPerRow)
|
||||
FileName = 'barSegment%d.svg' % row
|
||||
FileName = f'barSegment{row}.svg'
|
||||
groupLen = len(xlabelListTrunk)
|
||||
BarGraph(seriesDictTrunk, xlabelListTrunk, groupLen, FileName, (row == 0))
|
||||
|
||||
|
|
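ParseBranchListFile() above reads a whitespace-separated list in which each row names a trace log, a branch predictor type, and that predictor's parameters. A small sketch of how one row is tokenized follows; the example row and list-file path are hypothetical, not taken from the repository.

```python
# Sketch of the branch-list row format consumed by ParseBranchListFile().
# The example row and path are hypothetical.
import os

line = "branch_GSHARE_10.log GSHARE 10 16 10 0"   # <log> <predictor type> <params...>
path = "sim/bpred/branchlist.txt"                 # hypothetical list-file location

tokens = line.split()
predictorLog = os.path.dirname(path) + '/' + tokens[0]
predictorType = tokens[1]
predictorParams = tokens[2:]
print(predictorLog, predictorType, predictorParams)
```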
|
@ -4,6 +4,7 @@
|
|||
# regression-wally
|
||||
# David_Harris@Hmc.edu 25 January 2021
|
||||
# Modified by Jarred Allen <jaallen@g.hmc.edu> and many others
|
||||
# jcarlin@hmc.edu December 2024
|
||||
# SPDX-License-Identifier: Apache-2.0 WITH SHL-2.1
|
||||
#
|
||||
# Run a regression with multiple configurations in parallel and exit with
|
||||
|
@ -11,65 +12,66 @@
|
|||
# output.
|
||||
#
|
||||
##################################
|
||||
import sys,os,shutil
|
||||
import sys
|
||||
import shutil
|
||||
import os
|
||||
import argparse
|
||||
import multiprocessing
|
||||
from collections import namedtuple
|
||||
from multiprocessing import Pool, TimeoutError
|
||||
from multiprocessing import Pool, TimeoutError as MPTimeoutError
|
||||
|
||||
# Globals
|
||||
WALLY = os.environ.get('WALLY')
|
||||
regressionDir = f'{WALLY}/sim'
|
||||
archVerifDir = f'{WALLY}/addins/cvw-arch-verif'
|
||||
coveragesim = "questa" # Questa is required for code/functional coverage
|
||||
defaultsim = "verilator" # Default simulator for all other tests
|
||||
lockstepsim = "questa"
|
||||
testfloatsim = "questa" # change to Verilator when Issue #707 about testfloat not running Verilator is resolved
|
||||
|
||||
|
||||
##################################
|
||||
# Define lists of configurations and tests to run on each configuration
|
||||
##################################
|
||||
|
||||
# The tests are a list with one element for each configuration
|
||||
# The element consists of the configuration name, a list of test suites to run,
|
||||
# The element consists of the configuration name, a list of test suites to run,
|
||||
# optionally a string to pass to the simulator, and optionally a nonstandard grep string to check for success
|
||||
|
||||
tests = [
|
||||
standard_tests = [
|
||||
["rv32e", ["arch32e"]],
|
||||
["rv32i", ["arch32i"]],
|
||||
["rv32imc", ["arch32i", "arch32c", "arch32m", "wally32periph"]],
|
||||
["rv32gc", ["arch32f", "arch32d", "arch32f_fma", "arch32d_fma", "arch32f_divsqrt", "arch32d_divsqrt",
|
||||
"arch32i", "arch32priv", "arch32c", "arch32m", "arch32a_amo", "arch32zifencei", "arch32zicond",
|
||||
"arch32zba", "arch32zbb", "arch32zbc", "arch32zbs", "arch32zfh", "arch32zfh_fma",
|
||||
["rv32gc", ["arch32f", "arch32d", "arch32f_fma", "arch32d_fma", "arch32f_divsqrt", "arch32d_divsqrt",
|
||||
"arch32i", "arch32priv", "arch32c", "arch32m", "arch32a_amo", "arch32zifencei", "arch32zicond",
|
||||
"arch32zba", "arch32zbb", "arch32zbc", "arch32zbs", "arch32zfh", "arch32zfh_fma",
|
||||
"arch32zfh_divsqrt", "arch32zfaf", "arch32zfad", "wally32a_lrsc", "wally32priv", "wally32periph", "arch32zcb",
|
||||
"arch32zbkb", "arch32zbkc", "arch32zbkx", "arch32zknd", "arch32zkne", "arch32zknh", "arch32vm_sv32", "arch32pmp"]],
|
||||
["rv64i", ["arch64i"]]
|
||||
]
|
||||
["rv64i", ["arch64i"]],
|
||||
["rv64gc", ["arch64f", "arch64d", "arch64zfh", "arch64f_fma", "arch64d_fma", "arch64zfh_fma", "arch64f_divsqrt",
|
||||
"arch64d_divsqrt", "arch64zfh_divsqrt", "arch64zfaf", "arch64zfad", "coverage64gc", "arch64i", "arch64priv",
|
||||
"arch64c", "arch64m", "arch64zcb", "arch64zifencei", "arch64zicond", "arch64a_amo", "wally64a_lrsc",
|
||||
"wally64periph", "wally64priv", "arch64zbkb", "arch64zbkc", "arch64zbkx", "arch64zknd", "arch64zkne", "arch64zknh",
|
||||
"arch64zba", "arch64zbb", "arch64zbc", "arch64zbs", "arch64pmp"]], # add when working: "arch64zicboz"
|
||||
]
|
||||
|
||||
# Separate test for short buildroot run through OpenSBI UART output
|
||||
tests_buildrootshort = [
|
||||
["buildroot", ["buildroot"], [f"+INSTR_LIMIT=1400000"], # Instruction limit gets to first OpenSBI UART output
|
||||
["buildroot", ["buildroot"], "--args +INSTR_LIMIT=1600000", # Instruction limit gets to first OpenSBI UART output
|
||||
"OpenSBI v", "buildroot_uart.out"]
|
||||
]
|
||||
|
||||
# Separate test for full buildroot run
|
||||
tests_buildrootboot = [
|
||||
["buildroot", ["buildroot"], [f"+INSTR_LIMIT=600000000"], # boot entire buildroot Linux to login prompt
|
||||
["buildroot", ["buildroot"], "--args +INSTR_LIMIT=600000000", # boot entire buildroot Linux to login prompt
|
||||
"WallyHostname login: ", "buildroot_uart.out"]
|
||||
]
|
||||
|
||||
tests_buildrootbootlockstep = [
|
||||
["buildroot", ["buildroot"], [f"+INSTR_LIMIT=600000000 --lockstep"], # boot entire buildroot Linux to login prompt
|
||||
["buildroot", ["buildroot"], "--args +INSTR_LIMIT=600000000 --lockstep", # boot entire buildroot Linux to login prompt
|
||||
"WallyHostname login: ", "buildroot_uart.out"]
|
||||
]
|
||||
|
||||
|
||||
# Separate out floating-point tests for RV64 to speed up coverage
|
||||
tests64gc_nofp = [
|
||||
["rv64gc", ["coverage64gc", "arch64i", "arch64priv", "arch64c", "arch64m", "arch64zcb",
|
||||
"arch64zifencei", "arch64zicond", "arch64a_amo", "wally64a_lrsc", "wally64periph", "wally64priv",
|
||||
"arch64zbkb", "arch64zbkc", "arch64zbkx", "arch64zknd", "arch64zkne", "arch64zknh",
|
||||
"arch64zba", "arch64zbb", "arch64zbc", "arch64zbs", "arch64pmp"]] # add when working: "arch64zicboz"
|
||||
]
|
||||
|
||||
tests64gc_fp = [
|
||||
["rv64gc", ["arch64f", "arch64d", "arch64zfh",
|
||||
"arch64f_fma", "arch64d_fma", "arch64zfh_fma",
|
||||
"arch64f_divsqrt", "arch64d_divsqrt", "arch64zfh_divsqrt",
|
||||
"arch64zfaf", "arch64zfad"]]
|
||||
]
|
||||
|
||||
derivconfigtests = [
|
||||
# memory system
|
||||
["tlb2_rv32gc", ["wally32priv"]],
|
||||
|
@ -92,21 +94,21 @@ derivconfigtests = [
|
|||
["ram_1_1_rv64gc", ["ahb64"]],
|
||||
["ram_2_0_rv64gc", ["ahb64"]],
|
||||
["ram_2_1_rv64gc", ["ahb64"]],
|
||||
# RV32 cacheless designs will not work unless DTIM supports FLEN > XLEN. This support is not planned.
|
||||
# ["nodcache_rv32gc", ["ahb32"]],
|
||||
# ["nocache_rv32gc", ["ahb32"]],
|
||||
# RV32 cacheless designs will not work unless DTIM supports FLEN > XLEN. This support is not planned.
|
||||
# ["nodcache_rv32gc", ["ahb32"]],
|
||||
# ["nocache_rv32gc", ["ahb32"]],
|
||||
["noicache_rv32gc", ["ahb32"]],
|
||||
["noicache_rv64gc", ["ahb64"]],
|
||||
["nodcache_rv64gc", ["ahb64"]],
|
||||
["nocache_rv64gc", ["ahb64"]],
|
||||
|
||||
# Atomic variants
|
||||
# Atomic variants
|
||||
["zaamo_rv64gc", ["arch64i", "arch64a_amo"]],
|
||||
["zalrsc_rv64gc", ["arch64i", "wally64a_lrsc"]],
|
||||
["zaamo_rv32gc", ["arch32i", "arch32a_amo"]],
|
||||
["zalrsc_rv32gc", ["arch32i", "wally32a_lrsc"]],
|
||||
|
||||
# Bit manipulation and crypto variants
|
||||
# Bit manipulation and crypto variants
|
||||
["zba_rv32gc", ["arch32i", "arch32zba"]],
|
||||
["zbb_rv32gc", ["arch32i", "arch32zbb"]],
|
||||
["zbc_rv32gc", ["arch32i", "arch32zbc"]],
|
||||
|
@ -129,7 +131,7 @@ derivconfigtests = [
|
|||
["zknd_rv64gc", ["arch64i", "arch64zknd"]],
|
||||
["zknh_rv64gc", ["arch64i", "arch64zknh"]],
|
||||
|
||||
# No privilege modes variants
|
||||
# No privilege modes variants
|
||||
["noS_rv32gc", ["arch32i", "arch32f", "arch32priv", "arch32c", "arch32m", "arch32a_amo", "arch32zifencei", "arch32zicond",
|
||||
"arch32zba", "arch32zfaf", "arch32zfad", "wally32a_lrsc", "arch32zcb", "arch32zbkx", "arch32zknd"]],
|
||||
["noS_rv64gc", ["arch64i", "arch64f", "arch64priv", "arch64c", "arch64m", "arch64a_amo", "arch64zifencei", "arch64zicond",
|
||||
|
@ -165,7 +167,7 @@ derivconfigtests = [
|
|||
["div_4_2_rv64gc", ["arch64f_divsqrt", "arch64d_divsqrt", "arch64m"]],
|
||||
["div_4_2i_rv64gc", ["arch64f_divsqrt", "arch64d_divsqrt", "arch64m"]],
|
||||
["div_4_4_rv64gc", ["arch64f_divsqrt", "arch64d_divsqrt", "arch64m"]],
|
||||
["div_4_4i_rv64gc", ["arch64f_divsqrt", "arch64d_divsqrt", "arch64m"]],
|
||||
["div_4_4i_rv64gc", ["arch64f_divsqrt", "arch64d_divsqrt", "arch64m"]],
|
||||
|
||||
# fpu permutations
|
||||
["f_rv32gc", ["arch32f", "arch32f_divsqrt", "arch32f_fma", "arch32zfaf"]],
|
||||
|
@ -174,26 +176,25 @@ derivconfigtests = [
|
|||
["fdq_rv32gc", ["arch32f", "arch32f_divsqrt", "arch32f_fma", "arch32d", "arch32d_divsqrt", "arch32d_fma", "arch32i", "arch32zfaf", "arch32zfad"]],
|
||||
["fdqh_rv32gc", ["arch32f", "arch32f_divsqrt", "arch32f_fma", "arch32d", "arch32d_divsqrt", "arch32d_fma", "arch32zfh", "arch32zfh_divsqrt", "arch32i", "arch32zfaf", "arch32zfad"]],
|
||||
["f_rv64gc", ["arch64f", "arch64f_divsqrt", "arch64f_fma", "arch64zfaf"]],
|
||||
["fh_rv64gc", ["arch64f", "arch64f_divsqrt", "arch64f_fma", "arch64zfh", "arch64zfh_divsqrt", "arch64zfaf"]],
|
||||
["fh_rv64gc", ["arch64f", "arch64f_divsqrt", "arch64f_fma", "arch64zfh", "arch64zfh_divsqrt", "arch64zfaf"]],
|
||||
["fdh_rv64gc", ["arch64f", "arch64f_divsqrt", "arch64f_fma", "arch64d", "arch64d_divsqrt", "arch64d_fma", "arch64zfh", "arch64zfh_divsqrt", "arch64zfaf", "arch64zfad"]],
|
||||
["fdq_rv64gc", ["arch64f", "arch64f_divsqrt", "arch64f_fma", "arch64d", "arch64d_divsqrt", "arch64d_fma", "arch64i", "arch64zfaf", "arch64zfad"]],
|
||||
["fdqh_rv64gc", ["arch64f", "arch64f_divsqrt", "arch64f_fma", "arch64d", "arch64d_divsqrt", "arch64d_fma", "arch64zfh", "arch64zfh_divsqrt", "arch64i", "arch64zfaf", "arch64zfad"]], # "wally64q" when Q is supported again in riscof config file
|
||||
]
|
||||
|
||||
bpredtests = [
|
||||
|
||||
["nobpred_rv32gc", ["rv32i"]],
|
||||
["bpred_TWOBIT_6_16_10_0_rv32gc", ["embench"], "-GPrintHPMCounters=1"],
|
||||
["bpred_TWOBIT_8_16_10_0_rv32gc", ["embench"], "-GPrintHPMCounters=1"],
|
||||
["bpred_TWOBIT_10_16_10_0_rv32gc", ["embench"], "-GPrintHPMCounters=1"],
|
||||
["bpred_TWOBIT_10_16_10_0_rv32gc", ["embench"], "-GPrintHPMCounters=1"],
|
||||
["bpred_TWOBIT_12_16_10_0_rv32gc", ["embench"], "-GPrintHPMCounters=1"],
|
||||
["bpred_TWOBIT_14_16_10_0_rv32gc", ["embench"], "-GPrintHPMCounters=1"],
|
||||
["bpred_TWOBIT_14_16_10_0_rv32gc", ["embench"], "-GPrintHPMCounters=1"],
|
||||
["bpred_TWOBIT_16_16_10_0_rv32gc", ["embench"], "-GPrintHPMCounters=1"],
|
||||
["bpred_TWOBIT_6_16_10_1_rv32gc", ["embench"], "-GPrintHPMCounters=1"],
|
||||
["bpred_TWOBIT_8_16_10_1_rv32gc", ["embench"], "-GPrintHPMCounters=1"],
|
||||
["bpred_TWOBIT_10_16_10_1_rv32gc", ["embench"], "-GPrintHPMCounters=1"],
|
||||
["bpred_TWOBIT_10_16_10_1_rv32gc", ["embench"], "-GPrintHPMCounters=1"],
|
||||
["bpred_TWOBIT_12_16_10_1_rv32gc", ["embench"], "-GPrintHPMCounters=1"],
|
||||
["bpred_TWOBIT_14_16_10_1_rv32gc", ["embench"], "-GPrintHPMCounters=1"],
|
||||
["bpred_TWOBIT_14_16_10_1_rv32gc", ["embench"], "-GPrintHPMCounters=1"],
|
||||
["bpred_TWOBIT_16_16_10_1_rv32gc", ["embench"], "-GPrintHPMCounters=1"],
|
||||
|
||||
["bpred_GSHARE_6_16_10_0_rv32gc", ["embench"], "-GPrintHPMCounters=1"],
|
||||
|
@ -230,6 +231,33 @@ bpredtests = [
|
|||
["bpred_GSHARE_10_10_10_1_rv32gc", ["embench"], "-GPrintHPMCounters=1"]
|
||||
]
|
||||
|
||||
testfloatdivconfigs = [
|
||||
"fdh_div_2_1_rv32gc", "fdh_div_2_1_rv64gc", "fdh_div_2_2_rv32gc",
|
||||
"fdh_div_2_2_rv64gc", "fdh_div_2_4_rv32gc", "fdh_div_2_4_rv64gc",
|
||||
"fdh_div_4_1_rv32gc", "fdh_div_4_1_rv64gc", "fdh_div_4_2_rv32gc",
|
||||
"fdh_div_4_2_rv64gc", "fdh_div_4_4_rv32gc", "fdh_div_4_4_rv64gc",
|
||||
"fd_div_2_1_rv32gc", "fd_div_2_1_rv64gc", "fd_div_2_2_rv32gc",
|
||||
"fd_div_2_2_rv64gc", "fd_div_2_4_rv32gc", "fd_div_2_4_rv64gc",
|
||||
"fd_div_4_1_rv32gc", "fd_div_4_1_rv64gc", "fd_div_4_2_rv32gc",
|
||||
"fd_div_4_2_rv64gc", "fd_div_4_4_rv32gc", "fd_div_4_4_rv64gc",
|
||||
"fdqh_div_2_1_rv32gc", "fdqh_div_2_1_rv64gc", "fdqh_div_2_2_rv32gc",
|
||||
"fdqh_div_2_2_rv64gc", "fdqh_div_2_4_rv32gc", "fdqh_div_2_4_rv64gc",
|
||||
"fdqh_div_4_1_rv32gc", "fdqh_div_4_1_rv64gc", "fdqh_div_4_2_rv32gc",
|
||||
"fdqh_div_4_2_rv64gc", "fdqh_div_4_4_rv32gc", "fdqh_div_4_4_rv64gc",
|
||||
"fdq_div_2_1_rv32gc", "fdq_div_2_1_rv64gc", "fdq_div_2_2_rv32gc",
|
||||
"fdq_div_2_2_rv64gc", "fdq_div_2_4_rv32gc", "fdq_div_2_4_rv64gc",
|
||||
"fdq_div_4_1_rv32gc", "fdq_div_4_1_rv64gc", "fdq_div_4_2_rv32gc",
|
||||
"fdq_div_4_2_rv64gc", "fdq_div_4_4_rv32gc", "fdq_div_4_4_rv64gc",
|
||||
"fh_div_2_1_rv32gc", "fh_div_2_1_rv64gc", "fh_div_2_2_rv32gc",
|
||||
"fh_div_2_2_rv64gc", "fh_div_2_4_rv32gc", "fh_div_2_4_rv64gc",
|
||||
"fh_div_4_1_rv32gc", "fh_div_4_1_rv64gc", "fh_div_4_2_rv32gc",
|
||||
"fh_div_4_2_rv64gc", "fh_div_4_4_rv32gc", "fh_div_4_4_rv64gc",
|
||||
"f_div_2_1_rv32gc", "f_div_2_1_rv64gc", "f_div_2_2_rv32gc",
|
||||
"f_div_2_2_rv64gc", "f_div_2_4_rv32gc", "f_div_2_4_rv64gc",
|
||||
"f_div_4_1_rv32gc", "f_div_4_1_rv64gc", "f_div_4_2_rv32gc",
|
||||
"f_div_4_2_rv64gc", "f_div_4_4_rv32gc", "f_div_4_4_rv64gc"
|
||||
]
|
||||
|
||||
# list of tests not supported by ImperasDV yet that should be waived during lockstep testing
|
||||
lockstepwaivers = [
|
||||
"WALLY-q-01.S_ref.elf", # Q extension is not supported by ImperasDV
|
||||
|
@ -261,109 +289,77 @@ class bcolors:
|
|||
BOLD = '\033[1m'
|
||||
UNDERLINE = '\033[4m'
|
||||
|
||||
def addTests(tests, sim):
|
||||
sim_logdir = WALLY+ "/sim/" + sim + "/logs/"
|
||||
for test in tests:
|
||||
|
||||
def addTests(testList, sim, coverStr, configs):
|
||||
sim_logdir = f"{regressionDir}/{sim}/logs/"
|
||||
for test in testList:
|
||||
config = test[0]
|
||||
suites = test[1]
|
||||
if (len(test) >= 3):
|
||||
args = " --args " + " ".join(test[2])
|
||||
else:
|
||||
args = ""
|
||||
if (len(test) >= 4):
|
||||
gs = test[3]
|
||||
else:
|
||||
gs = "All tests ran without failures"
|
||||
cmdPrefix="wsim --sim " + sim + " " + coverStr + " " + config
|
||||
flags = f"{test[2]}" if len(test) >= 3 else ""
|
||||
gs = test[3] if len(test) >= 4 else "All tests ran without failures"
|
||||
cmdPrefix=f"wsim --sim {sim} {coverStr} {flags} {config}"
|
||||
for t in suites:
|
||||
sim_log = sim_logdir + config + "_" + t + ".log"
|
||||
if (len(test) >= 5):
|
||||
grepfile = sim_logdir + test[4]
|
||||
else:
|
||||
grepfile = sim_log
|
||||
sim_log = f"{sim_logdir}{config}_{t}.log"
|
||||
grepfile = sim_logdir + test[4] if len(test) >= 5 else sim_log
|
||||
tc = TestCase(
|
||||
name=t,
|
||||
variant=config,
|
||||
cmd=cmdPrefix + " " + t + args + " > " + sim_log,
|
||||
cmd=f"{cmdPrefix} {t} > {sim_log}",
|
||||
grepstr=gs,
|
||||
grepfile = grepfile)
|
||||
configs.append(tc)
|
||||
|
||||
|
||||
def addTestsByDir(dir, config, sim, lockstepMode=0, brekerMode=0):
|
||||
if os.path.isdir(dir):
|
||||
sim_logdir = WALLY+ "/sim/" + sim + "/logs/"
|
||||
if coverStr == "--fcov": # use --fcov in place of --lockstep
|
||||
cmdPrefix="wsim --sim " + sim + " " + coverStr + " " + config
|
||||
gs="Mismatches : 0"
|
||||
if ("cvw-arch-verif/tests" in dir and not "priv" in dir):
|
||||
fileEnd = "ALL.elf"
|
||||
else:
|
||||
fileEnd = ".elf"
|
||||
elif coverStr == "--ccov":
|
||||
cmdPrefix="wsim --sim " + sim + " " + coverStr + " " + config
|
||||
gs="Single Elf file tests are not signatured verified."
|
||||
if ("cvw-arch-verif/tests" in dir and not "priv" in dir):
|
||||
fileEnd = "ALL.elf"
|
||||
else:
|
||||
fileEnd = ".elf"
|
||||
elif lockstepMode:
|
||||
cmdPrefix="wsim --lockstep --sim " + sim + " " + config
|
||||
gs="Mismatches : 0"
|
||||
fileEnd = ".elf"
|
||||
elif brekerMode:
|
||||
cmdPrefix="wsim --sim " + sim + " " + config
|
||||
gs="# trek: info: summary: Test PASSED"
|
||||
fileEnd = ".elf"
|
||||
else:
|
||||
cmdPrefix="wsim --sim " + sim + " " + config
|
||||
gs="Single Elf file tests are not signatured verified."
|
||||
fileEnd = ".elf"
|
||||
for dirpath, dirnames, filenames in os.walk(os.path.abspath(dir)):
|
||||
for file in filenames:
|
||||
# fcov lockstep only runs on WALLY-COV-ALL.elf files; other lockstep runs on all files
|
||||
if file.endswith(fileEnd):
|
||||
fullfile = os.path.join(dirpath, file)
|
||||
fields = fullfile.rsplit('/', 3)
|
||||
if (fields[2] == "ref"):
|
||||
shortelf = fields[1] + "_" + fields[3]
|
||||
else:
|
||||
shortelf = fields[2] + "_" + fields[3]
|
||||
if (shortelf in lockstepwaivers): # skip tests that itch bugs in ImperasDV
|
||||
print(f"{bcolors.WARNING}Skipping waived test {shortelf}{bcolors.ENDC}")
|
||||
continue
|
||||
sim_log = sim_logdir + config + "_" + shortelf + ".log"
|
||||
grepstring = ""
|
||||
tc = TestCase(
|
||||
name=file,
|
||||
variant=config,
|
||||
cmd=cmdPrefix + " " + fullfile + " > " + sim_log,
|
||||
grepstr=gs,
|
||||
grepfile = sim_log)
|
||||
configs.append(tc)
|
||||
def addTestsByDir(testDir, config, sim, coverStr, configs, lockstepMode=0, brekerMode=0):
|
||||
if not os.path.isdir(testDir):
|
||||
print(f"Error: Directory not found: {testDir}")
|
||||
sys.exit(1)
|
||||
|
||||
sim_logdir = f"{regressionDir}/{sim}/logs/"
|
||||
cmdPrefix = f"wsim --sim {sim} {coverStr} {'--lockstep' if lockstepMode else ''} {config}"
|
||||
# fcov/ccov only runs on WALLY-COV-ALL.elf files; other lockstep runs on all files
|
||||
fileStart = "WALLY-COV-ALL" if "cvw-arch-verif/tests" in testDir and "priv" not in testDir and (coverStr == "--fcov" or coverStr == "--ccov") else ""
|
||||
fileEnd = ".elf"
|
||||
if lockstepMode or coverStr == "--fcov":
|
||||
gs = "Mismatches : 0"
|
||||
elif brekerMode:
|
||||
gs="# trek: info: summary: Test PASSED"
|
||||
else:
|
||||
print("Error: Directory not found: " + dir)
|
||||
exit(1)
|
||||
gs = "Single Elf file tests are not signatured verified."
|
||||
for dirpath, _, filenames in os.walk(os.path.abspath(testDir)):
|
||||
for file in filenames:
|
||||
if file.endswith(fileEnd) and file.startswith(fileStart):
|
||||
fullfile = os.path.join(dirpath, file)
|
||||
fields = fullfile.rsplit('/', 3)
|
||||
if fields[2] == "ref":
|
||||
shortelf = f"{fields[1]}_{fields[3]}"
|
||||
else:
|
||||
shortelf = f"{fields[2]}_{fields[3]}"
|
||||
if shortelf in lockstepwaivers: # skip tests that trigger bugs in ImperasDV
|
||||
print(f"{bcolors.WARNING}Skipping waived test {shortelf}{bcolors.ENDC}")
|
||||
continue
|
||||
sim_log = f"{sim_logdir}{config}_{shortelf}.log"
|
||||
tc = TestCase(
|
||||
name=file,
|
||||
variant=config,
|
||||
cmd=f"{cmdPrefix} {fullfile} > {sim_log}",
|
||||
grepstr=gs,
|
||||
grepfile = sim_log)
|
||||
configs.append(tc)
|
||||
|
||||
def search_log_for_text(text, grepfile):
|
||||
"""Search through the given log file for text, returning True if it is found or False if it is not"""
|
||||
grepwarn = "grep -i -H Warning: " + grepfile
|
||||
os.system(grepwarn)
|
||||
greperr = "grep -i -H Error: " + grepfile
|
||||
os.system(greperr)
|
||||
grepcmd = "grep -a -e '%s' '%s' > /dev/null" % (text, grepfile)
|
||||
# print(" search_log_for_text invoking %s" % grepcmd)
|
||||
return os.system(grepcmd) == 0
|
||||
with open(grepfile, errors="ignore") as file:
|
||||
content = file.readlines()
|
||||
for line in content:
|
||||
if "warning:" in line.lower():
|
||||
print(f"{bcolors.WARNING}{line.strip()}{bcolors.ENDC}")
|
||||
if "error:" in line.lower():
|
||||
print(f"{bcolors.FAIL}{line.strip()}{bcolors.ENDC}")
|
||||
return any(text in line for line in content)
|
||||
|
||||
def run_test_case(config, dryrun: bool = False):
|
||||
"""
|
||||
Run the given test case, and return 0 if the test succeeds and 1 if it fails
|
||||
|
||||
Do not execute commands if dryrun
|
||||
"""
|
||||
grepfile = config.grepfile
|
||||
cmd = config.cmd
|
||||
os.chdir(regressionDir)
|
||||
if dryrun:
|
||||
print(f"Executing {cmd}", flush=True)
|
||||
return 0
|
||||
|
@ -371,194 +367,136 @@ def run_test_case(config, dryrun: bool = False):
|
|||
os.system(cmd)
|
||||
if search_log_for_text(config.grepstr, grepfile):
|
||||
# Flush is needed to flush output to stdout when running in multiprocessing Pool
|
||||
# print(f"{bcolors.OKGREEN}%s_%s: Success{bcolors.ENDC}" % (config.variant, config.name), flush=True)
|
||||
print(f"{bcolors.OKGREEN}%s: Success{bcolors.ENDC}" % (config.cmd), flush=True)
|
||||
print(f"{bcolors.OKGREEN}{cmd}: Success{bcolors.ENDC}", flush=True)
|
||||
return 0
|
||||
else:
|
||||
print(f"{bcolors.FAIL}%s: Failures detected in output{bcolors.ENDC}" % (config.cmd), flush=True)
|
||||
print(" Check %s" % grepfile)
|
||||
print(f"{bcolors.FAIL}{cmd}: Failures detected in output{bcolors.ENDC}", flush=True)
|
||||
print(f" Check {grepfile}", flush=True)
|
||||
return 1
|
||||
|
||||
##################################
|
||||
# Main body
|
||||
##################################
|
||||
|
||||
def parse_args():
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument("--ccov", help="Code Coverage", action="store_true")
|
||||
parser.add_argument("--fcov", help="Functional Coverage", action="store_true")
|
||||
parser.add_argument("--nightly", help="Run large nightly regression", action="store_true")
|
||||
parser.add_argument("--buildroot", help="Include Buildroot Linux boot test (takes many hours, done along with --nightly)", action="store_true")
|
||||
parser.add_argument("--testfloat", help="Include Testfloat floating-point unit tests", action="store_true")
|
||||
parser.add_argument("--fp", help="Include floating-point tests in coverage (slower runtime)", action="store_true") # Currently not used
|
||||
parser.add_argument("--breker", help="Run Breker tests", action="store_true") # Requires a license for the breker tool. See tests/breker/README.md for details
|
||||
parser.add_argument("--dryrun", help="Print commands invoked to console without running regression", action="store_true")
|
||||
return parser.parse_args()
|
||||
|
||||
|
||||
WALLY = os.environ.get('WALLY')
|
||||
regressionDir = WALLY + '/sim'
|
||||
os.chdir(regressionDir)
|
||||
|
||||
coveragesim = "questa" # Questa is required for code/functional coverage
|
||||
#defaultsim = "questa" # Default simulator for all other tests; change to Verilator when flow is ready
|
||||
defaultsim = "verilator" # Default simulator for all other tests
|
||||
lockstepsim = "questa"
|
||||
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument("--ccov", help="Code Coverage", action="store_true")
|
||||
parser.add_argument("--fcov", help="Functional Coverage", action="store_true")
|
||||
parser.add_argument("--nightly", help="Run large nightly regression", action="store_true")
|
||||
parser.add_argument("--buildroot", help="Include Buildroot Linux boot test (takes many hours, done along with --nightly)", action="store_true")
|
||||
parser.add_argument("--testfloat", help="Include Testfloat floating-point unit tests", action="store_true")
|
||||
parser.add_argument("--fp", help="Include floating-point tests in coverage (slower runtime)", action="store_true") # Currently not used
|
||||
parser.add_argument("--breker", help="Run Breker tests", action="store_true") # Requires a license for the breker tool. See tests/breker/README.md for details
|
||||
parser.add_argument("--dryrun", help="Print commands invoked to console without running regression", action="store_true")
|
||||
args = parser.parse_args()
|
||||
|
||||
if (args.nightly):
|
||||
nightMode = "--nightly"
|
||||
sims = ["questa", "verilator", "vcs"] # exercise all simulators; can omit a sim if no license is available
|
||||
else:
|
||||
nightMode = ""
|
||||
sims = [defaultsim]
|
||||
|
||||
if (args.ccov): # only run RV64GC tests in coverage mode
|
||||
coverStr = '--ccov'
|
||||
elif (args.fcov): # only run RV64GC tests in lockstep in coverage mode
|
||||
coverStr = '--fcov'
|
||||
else:
|
||||
coverStr = ''
|
||||
|
||||
|
||||
# Run Lint
|
||||
configs = [
|
||||
TestCase(
|
||||
name="lints",
|
||||
variant="all",
|
||||
cmd="lint-wally " + nightMode + " | tee " + WALLY + "/sim/verilator/logs/all_lints.log",
|
||||
grepstr="lints run with no errors or warnings",
|
||||
grepfile = WALLY + "/sim/verilator/logs/all_lints.log")
|
||||
]
|
||||
|
||||
|
||||
|
||||
# run full buildroot boot simulation (slow) if buildroot flag is set. Start it early to overlap with other tests
|
||||
if (args.buildroot):
|
||||
# addTests(tests_buildrootboot, defaultsim) # non-lockstep with Verilator runs in about 2 hours
|
||||
addTests(tests_buildrootbootlockstep, lockstepsim) # lockstep with Questa and ImperasDV runs overnight
|
||||
|
||||
if (args.ccov): # only run RV64GC tests on Questa in code coverage mode
|
||||
addTestsByDir(WALLY+"/addins/cvw-arch-verif/tests/lockstep/rv64/", "rv64gc", coveragesim)
|
||||
addTestsByDir(WALLY+"/addins/cvw-arch-verif/tests/lockstep/priv/rv64/", "rv64gc", coveragesim)
|
||||
addTestsByDir(WALLY+"/tests/coverage/", "rv64gc", coveragesim)
|
||||
elif (args.fcov): # run tests in lockstep in functional coverage mode
|
||||
addTestsByDir(WALLY+"/addins/cvw-arch-verif/tests/lockstep/rv32/", "rv32gc", coveragesim)
|
||||
addTestsByDir(WALLY+"/addins/cvw-arch-verif/tests/lockstep/rv64/", "rv64gc", coveragesim)
|
||||
addTestsByDir(WALLY+"/addins/cvw-arch-verif/tests/lockstep/priv/rv32/", "rv32gc", coveragesim)
|
||||
addTestsByDir(WALLY+"/addins/cvw-arch-verif/tests/lockstep/priv/rv64/", "rv64gc", coveragesim)
|
||||
elif (args.breker):
|
||||
addTestsByDir(WALLY+"/tests/breker/work", "breker", "questa", brekerMode=1)
|
||||
else:
|
||||
for sim in sims:
|
||||
if (not (args.buildroot and sim == lockstepsim)): # skip short buildroot sim if running long one
|
||||
addTests(tests_buildrootshort, sim)
|
||||
addTests(tests, sim)
|
||||
addTests(tests64gc_nofp, sim)
|
||||
addTests(tests64gc_fp, sim)
|
||||
|
||||
# run derivative configurations and lockstep tests in nightly regression
|
||||
if (args.nightly):
|
||||
addTestsByDir(WALLY+"/tests/coverage", "rv64gc", lockstepsim, lockstepMode=1)
|
||||
addTestsByDir(WALLY+"/tests/riscof/work/wally-riscv-arch-test/rv64i_m", "rv64gc", lockstepsim, lockstepMode=1)
|
||||
addTestsByDir(WALLY+"/tests/riscof/work/wally-riscv-arch-test/rv32i_m", "rv32gc", lockstepsim, lockstepMode=1)
|
||||
addTests(derivconfigtests, defaultsim)
|
||||
# addTests(bpredtests, defaultsim) # This is currently broken in regression due to something related to the new wsim script.
|
||||
|
||||
# testfloat tests
|
||||
if (args.testfloat): # for testfloat alone, just run testfloat tests
|
||||
configs = []
|
||||
if (args.testfloat or args.nightly): # for nightly, run testfloat along with others
|
||||
testfloatsim = "questa" # change to Verilator when Issue #707 about testfloat not running Verilator is resolved
|
||||
testfloatconfigs = ["fdqh_rv64gc", "fdq_rv64gc", "fdh_rv64gc", "fd_rv64gc", "fh_rv64gc", "f_rv64gc", "fdqh_rv32gc", "f_rv32gc"]
|
||||
for config in testfloatconfigs:
|
||||
tests = ["div", "sqrt", "add", "sub", "mul", "cvtint", "cvtfp", "fma", "cmp"]
|
||||
if ("f_" in config):
|
||||
tests.remove("cvtfp")
|
||||
for test in tests:
|
||||
sim_log = WALLY + "/sim/" + testfloatsim + "/logs/"+config+"_"+test+".log"
|
||||
tc = TestCase(
|
||||
name=test,
|
||||
variant=config,
|
||||
cmd="wsim --tb testbench_fp --sim " + testfloatsim + " " + config + " " + test + " > " + sim_log,
|
||||
grepstr="All Tests completed with 0 errors",
|
||||
grepfile = sim_log)
|
||||
configs.append(tc)
|
||||
|
||||
|
||||
testfloatdivconfigs = [
|
||||
"fdh_div_2_1_rv32gc", "fdh_div_2_1_rv64gc", "fdh_div_2_2_rv32gc",
|
||||
"fdh_div_2_2_rv64gc", "fdh_div_2_4_rv32gc", "fdh_div_2_4_rv64gc",
|
||||
"fdh_div_4_1_rv32gc", "fdh_div_4_1_rv64gc", "fdh_div_4_2_rv32gc",
|
||||
"fdh_div_4_2_rv64gc", "fdh_div_4_4_rv32gc", "fdh_div_4_4_rv64gc",
|
||||
"fd_div_2_1_rv32gc", "fd_div_2_1_rv64gc", "fd_div_2_2_rv32gc",
|
||||
"fd_div_2_2_rv64gc", "fd_div_2_4_rv32gc", "fd_div_2_4_rv64gc",
|
||||
"fd_div_4_1_rv32gc", "fd_div_4_1_rv64gc", "fd_div_4_2_rv32gc",
|
||||
"fd_div_4_2_rv64gc", "fd_div_4_4_rv32gc", "fd_div_4_4_rv64gc",
|
||||
"fdqh_div_2_1_rv32gc", "fdqh_div_2_1_rv64gc", "fdqh_div_2_2_rv32gc",
|
||||
"fdqh_div_2_2_rv64gc", "fdqh_div_2_4_rv32gc", "fdqh_div_2_4_rv64gc",
|
||||
"fdqh_div_4_1_rv32gc", "fdqh_div_4_1_rv64gc", "fdqh_div_4_2_rv32gc",
|
||||
"fdqh_div_4_2_rv64gc", "fdqh_div_4_4_rv32gc", "fdqh_div_4_4_rv64gc",
|
||||
"fdq_div_2_1_rv32gc", "fdq_div_2_1_rv64gc", "fdq_div_2_2_rv32gc",
|
||||
"fdq_div_2_2_rv64gc", "fdq_div_2_4_rv32gc", "fdq_div_2_4_rv64gc",
|
||||
"fdq_div_4_1_rv32gc", "fdq_div_4_1_rv64gc", "fdq_div_4_2_rv32gc",
|
||||
"fdq_div_4_2_rv64gc", "fdq_div_4_4_rv32gc", "fdq_div_4_4_rv64gc",
|
||||
"fh_div_2_1_rv32gc", "fh_div_2_1_rv64gc", "fh_div_2_2_rv32gc",
|
||||
"fh_div_2_2_rv64gc", "fh_div_2_4_rv32gc", "fh_div_2_4_rv64gc",
|
||||
"fh_div_4_1_rv32gc", "fh_div_4_1_rv64gc", "fh_div_4_2_rv32gc",
|
||||
"fh_div_4_2_rv64gc", "fh_div_4_4_rv32gc", "fh_div_4_4_rv64gc",
|
||||
"f_div_2_1_rv32gc", "f_div_2_1_rv64gc", "f_div_2_2_rv32gc",
|
||||
"f_div_2_2_rv64gc", "f_div_2_4_rv32gc", "f_div_2_4_rv64gc",
|
||||
"f_div_4_1_rv32gc", "f_div_4_1_rv64gc", "f_div_4_2_rv32gc",
|
||||
"f_div_4_2_rv64gc", "f_div_4_4_rv32gc", "f_div_4_4_rv64gc"
|
||||
]
|
||||
for config in testfloatdivconfigs:
|
||||
# div test case
|
||||
tests = ["div", "sqrt", "cvtint", "cvtfp"]
|
||||
if ("f_" in config):
|
||||
tests.remove("cvtfp")
|
||||
for test in tests:
|
||||
sim_log = WALLY + "/sim/questa/logs/"+config+"_"+test+".log"
|
||||
tc = TestCase(
|
||||
name=test,
|
||||
variant=config,
|
||||
cmd="wsim --tb testbench_fp " + config + " " + test + " > " + sim_log,
|
||||
grepstr="All Tests completed with 0 errors",
|
||||
grepfile = WALLY + "/sim/questa/logs/"+config+"_"+test+".log")
|
||||
configs.append(tc)
|
||||
|
||||
|
||||
def main():
|
||||
"""Run the tests and count the failures"""
|
||||
global configs, args
|
||||
os.chdir(regressionDir)
|
||||
dirs = ["questa/logs", "questa/wkdir", "verilator/logs", "verilator/wkdir", "vcs/logs", "vcs/wkdir"]
|
||||
for d in dirs:
|
||||
try:
|
||||
os.system('rm -rf %s' % d)
|
||||
os.mkdir(d)
|
||||
except:
|
||||
pass
|
||||
def process_args(args):
|
||||
coverStr = ""
|
||||
# exercise all simulators in nightly; can omit a sim if no license is available
|
||||
sims = ["questa", "verilator", "vcs"] if args.nightly else [defaultsim]
|
||||
if args.ccov:
|
||||
sims = [coveragesim]
|
||||
coverStr = "--ccov"
|
||||
TIMEOUT_DUR = 20*60 # seconds
|
||||
os.system('rm -f questa/ucdb/* questa/cov/*')
|
||||
for d in ["ucdb", "cov"]:
|
||||
shutil.rmtree(f"{regressionDir}/questa/{d}", ignore_errors=True)
|
||||
os.makedirs(f"{regressionDir}/questa/{d}", exist_ok=True)
|
||||
elif args.fcov:
|
||||
sims = [coveragesim]
|
||||
coverStr = "--fcov"
|
||||
TIMEOUT_DUR = 8*60
|
||||
os.system('rm -f questa/fcov_ucdb/* questa/fcov_logs/* questa/fcov/*')
|
||||
shutil.rmtree(f"{regressionDir}/questa/fcov_ucdb", ignore_errors=True)
|
||||
os.makedirs(f"{regressionDir}/questa/fcov_ucdb", exist_ok=True)
|
||||
elif args.buildroot:
|
||||
TIMEOUT_DUR = 60*1440 # 1 day
|
||||
elif args.testfloat:
|
||||
sims = [testfloatsim]
|
||||
TIMEOUT_DUR = 30*60 # seconds
|
||||
elif args.nightly:
|
||||
TIMEOUT_DUR = 30*60 # seconds
|
||||
else:
|
||||
TIMEOUT_DUR = 10*60 # seconds
|
||||
|
||||
return sims, coverStr, TIMEOUT_DUR
|
||||
|
||||
|
||||
def selectTests(args, sims, coverStr):
|
||||
# Run Lint
|
||||
configs = [
|
||||
TestCase(
|
||||
name="lints",
|
||||
variant="all",
|
||||
cmd=f"lint-wally {'--nightly' if args.nightly else ''} | tee {regressionDir}/verilator/logs/all_lints.log",
|
||||
grepstr="lints run with no errors or warnings",
|
||||
grepfile = f"{regressionDir}/verilator/logs/all_lints.log")
|
||||
]
|
||||
|
||||
# run full buildroot boot simulation (slow) if buildroot flag is set. Start it early to overlap with other tests
|
||||
if args.buildroot:
|
||||
# addTests(tests_buildrootboot, defaultsim) # non-lockstep with Verilator runs in about 2 hours
|
||||
addTests(tests_buildrootbootlockstep, lockstepsim, coverStr, configs) # lockstep with Questa and ImperasDV runs overnight
|
||||
|
||||
if args.ccov: # only run RV64GC tests on Questa in code coverage mode
|
||||
addTestsByDir(f"{archVerifDir}/tests/lockstep/rv64/", "rv64gc", coveragesim, coverStr, configs)
|
||||
addTestsByDir(f"{archVerifDir}/tests/lockstep/priv/rv64/", "rv64gc", coveragesim, coverStr, configs)
|
||||
addTestsByDir(WALLY+"/tests/coverage/", "rv64gc", coveragesim, coverStr, configs)
|
||||
elif args.fcov: # run tests in lockstep in functional coverage mode
|
||||
addTestsByDir(f"{archVerifDir}/tests/lockstep/rv32/", "rv32gc", coveragesim, coverStr, configs)
|
||||
addTestsByDir(f"{archVerifDir}/tests/lockstep/rv64/", "rv64gc", coveragesim, coverStr, configs)
|
||||
addTestsByDir(f"{archVerifDir}/tests/lockstep/priv/rv32/", "rv32gc", coveragesim, coverStr, configs)
|
||||
addTestsByDir(f"{archVerifDir}/tests/lockstep/priv/rv64/", "rv64gc", coveragesim, coverStr, configs)
|
||||
elif args.breker:
|
||||
addTestsByDir(WALLY+"/tests/breker/work", "breker", "questa", coverStr, configs, brekerMode=1)
|
||||
elif not args.testfloat:
|
||||
for sim in sims:
|
||||
if not (args.buildroot and sim == lockstepsim): # skip short buildroot sim if running long one
|
||||
addTests(tests_buildrootshort, sim, coverStr, configs)
|
||||
addTests(standard_tests, sim, coverStr, configs)
|
||||
|
||||
# run derivative configurations and lockstep tests in nightly regression
|
||||
if args.nightly:
|
||||
addTestsByDir(WALLY+"/tests/coverage", "rv64gc", lockstepsim, coverStr, configs, lockstepMode=1)
|
||||
addTestsByDir(WALLY+"/tests/riscof/work/wally-riscv-arch-test/rv64i_m", "rv64gc", lockstepsim, coverStr, configs, lockstepMode=1)
|
||||
addTestsByDir(WALLY+"/tests/riscof/work/wally-riscv-arch-test/rv32i_m", "rv32gc", lockstepsim, coverStr, configs, lockstepMode=1)
|
||||
addTests(derivconfigtests, defaultsim, coverStr, configs)
|
||||
# addTests(bpredtests, defaultsim) # This is currently broken in regression due to something related to the new wsim script.
|
||||
|
||||
# testfloat tests
|
||||
if (args.testfloat or args.nightly): # for nightly, run testfloat along with others
|
||||
testfloatconfigs = ["fdqh_rv64gc", "fdq_rv64gc", "fdh_rv64gc", "fd_rv64gc", "fh_rv64gc", "f_rv64gc", "fdqh_rv32gc", "f_rv32gc"]
|
||||
for config in testfloatconfigs + testfloatdivconfigs:
|
||||
if config in testfloatconfigs:
|
||||
tests = ["div", "sqrt", "add", "sub", "mul", "cvtint", "cvtfp", "fma", "cmp"]
|
||||
else:
|
||||
tests = ["div", "sqrt", "cvtint", "cvtfp"]
|
||||
if "f_" in config:
|
||||
tests.remove("cvtfp")
|
||||
for test in tests:
|
||||
sim_log = f"{regressionDir}/{testfloatsim}/logs/{config}_{test}.log"
|
||||
tc = TestCase(
|
||||
name=test,
|
||||
variant=config,
|
||||
cmd=f"wsim --tb testbench_fp --sim {testfloatsim} {config} {test} > {sim_log}",
|
||||
grepstr="All Tests completed with 0 errors",
|
||||
grepfile = sim_log)
|
||||
configs.append(tc)
|
||||
return configs
|
||||
|
||||
|
||||
def makeDirs(sims):
|
||||
for sim in sims:
|
||||
dirs = [f"{regressionDir}/{sim}/wkdir", f"{regressionDir}/{sim}/logs"]
|
||||
for d in dirs:
|
||||
shutil.rmtree(d, ignore_errors=True)
|
||||
os.makedirs(d, exist_ok=True)
|
||||
|
||||
|
||||
def main(args):
|
||||
sims, coverStr, TIMEOUT_DUR = process_args(args)
|
||||
makeDirs(sims)
|
||||
configs = selectTests(args, sims, coverStr)
|
||||
# Scale the number of concurrent processes to the number of test cases, but
|
||||
# max out at a limited number of concurrent processes to not overwhelm the system
|
||||
# right now fcov, ccov, nightly all use Imperas
|
||||
if (args.ccov or args.fcov or args.nightly):
|
||||
ImperasDVLicenseCount = 16 # limit number of concurrent processes to avoid overloading ImperasDV licenses
|
||||
else:
|
||||
ImperasDVLicenseCount = 10000 # effectively no license limit for non-lockstep tests
|
||||
# right now fcov and nightly use Imperas
|
||||
ImperasDVLicenseCount = 16 if args.fcov or args.nightly else 10000
|
||||
with Pool(processes=min(len(configs),multiprocessing.cpu_count(), ImperasDVLicenseCount)) as pool:
|
||||
num_fail = 0
|
||||
results = {}
|
||||
|
@ -567,22 +505,24 @@ def main():
|
|||
for (config,result) in results.items():
|
||||
try:
|
||||
num_fail+=result.get(timeout=TIMEOUT_DUR)
|
||||
except TimeoutError:
|
||||
except MPTimeoutError:
|
||||
pool.terminate()
|
||||
pool.join()
|
||||
num_fail+=1
|
||||
print(f"{bcolors.FAIL}%s: Timeout - runtime exceeded %d seconds{bcolors.ENDC}" % (config.cmd, TIMEOUT_DUR))
|
||||
print(f"{bcolors.FAIL}{config.cmd}: Timeout - runtime exceeded {TIMEOUT_DUR} seconds{bcolors.ENDC}")
|
||||
|
||||
# Coverage report
|
||||
if args.ccov:
|
||||
os.system('make QuestaCodeCoverage')
|
||||
os.system(f"make -C {regressionDir}/QuestaCodeCoverage")
|
||||
if args.fcov:
|
||||
os.system('make -C '+WALLY+'/addins/cvw-arch-verif merge')
|
||||
os.system(f"make -C {archVerifDir} merge")
|
||||
# Count the number of failures
|
||||
if num_fail:
|
||||
print(f"{bcolors.FAIL}Regression failed with %s failed configurations{bcolors.ENDC}" % num_fail)
|
||||
print(f"{bcolors.FAIL}Regression failed with {num_fail} failed configurations{bcolors.ENDC}")
|
||||
else:
|
||||
print(f"{bcolors.OKGREEN}SUCCESS! All tests ran without failures{bcolors.ENDC}")
|
||||
return num_fail
|
||||
|
||||
if __name__ == '__main__':
|
||||
exit(main())
|
||||
args = parse_args()
|
||||
sys.exit(main(args))
|
||||
|
|
|
@ -99,7 +99,7 @@ git_check() {
|
|||
|
||||
# Log output to a file and only print lines with keywords
|
||||
logger() {
|
||||
local log_file="$RISCV/logs/$1.log"
|
||||
local log_file="$RISCV/logs/$STATUS.log"
|
||||
local keyword_pattern="(\bwarning|\berror|\bfail|\bsuccess|\bstamp|\bdoesn't work)"
|
||||
local exclude_pattern="(_warning|warning_|_error|error_|-warning|warning-|-error|error-|Werror|error\.o|warning flags)"
|
||||
|
||||
|
@ -126,6 +126,8 @@ fi
|
|||
|
||||
# Determine script directory to locate related scripts
|
||||
dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
WALLY=$(dirname "$dir")
|
||||
export WALLY
|
||||
|
||||
# Get Linux distro and version
|
||||
source "${dir}"/wally-distro-check.sh
|
||||
|
@ -181,7 +183,7 @@ echo "Using $NUM_THREADS thread(s) for compilation"
|
|||
mkdir -p "$RISCV"/logs
|
||||
|
||||
# Install/update system packages if root. Otherwise, check that packages are already installed.
|
||||
STATUS="system packages"
|
||||
STATUS="system_packages"
|
||||
if [ "$ROOT" == true ]; then
|
||||
source "${dir}"/wally-package-install.sh
|
||||
else
|
||||
|
@ -209,7 +211,7 @@ fi
|
|||
# Create python virtual environment so the python command targets desired version of python
|
||||
# and installed packages are isolated from the rest of the system.
|
||||
section_header "Setting up Python Environment"
|
||||
STATUS="python virtual environment"
|
||||
STATUS="python_virtual_environment"
|
||||
cd "$RISCV"
|
||||
if [ ! -e "$RISCV"/riscv-python/bin/activate ]; then
|
||||
"$PYTHON_VERSION" -m venv riscv-python --prompt cvw
|
||||
|
@ -242,8 +244,8 @@ if (( RHEL_VERSION == 8 )) || (( UBUNTU_VERSION == 20 )); then
|
|||
rm -f glib-2.70.5.tar.xz
|
||||
cd glib-2.70.5
|
||||
meson setup _build --prefix="$RISCV"
|
||||
meson compile -C _build -j "${NUM_THREADS}" 2>&1 | logger $STATUS; [ "${PIPESTATUS[0]}" == 0 ]
|
||||
meson install -C _build 2>&1 | logger $STATUS; [ "${PIPESTATUS[0]}" == 0 ]
|
||||
meson compile -C _build -j "${NUM_THREADS}" 2>&1 | logger; [ "${PIPESTATUS[0]}" == 0 ]
|
||||
meson install -C _build 2>&1 | logger; [ "${PIPESTATUS[0]}" == 0 ]
|
||||
cd "$RISCV"
|
||||
rm -rf glib-2.70.5
|
||||
echo -e "${SUCCESS_COLOR}glib successfully installed!${ENDC}"
|
||||
|
@ -261,8 +263,8 @@ if (( RHEL_VERSION == 8 )); then
|
|||
rm -f gmp-6.3.0.tar.xz
|
||||
cd gmp-6.3.0
|
||||
./configure --prefix="$RISCV"
|
||||
make -j "${NUM_THREADS}" 2>&1 | logger $STATUS; [ "${PIPESTATUS[0]}" == 0 ]
|
||||
make install 2>&1 | logger $STATUS; [ "${PIPESTATUS[0]}" == 0 ]
|
||||
make -j "${NUM_THREADS}" 2>&1 | logger; [ "${PIPESTATUS[0]}" == 0 ]
|
||||
make install 2>&1 | logger; [ "${PIPESTATUS[0]}" == 0 ]
|
||||
cd "$RISCV"
|
||||
rm -rf gmp-6.3.0
|
||||
echo -e "${SUCCESS_COLOR}gmp successfully installed!${ENDC}"
|
||||
|
@ -284,6 +286,21 @@ if (( UBUNTU_VERSION == 20 || DEBIAN_VERSION == 11 )) || [ "$FAMILY" == suse ];
|
|||
fi
|
||||
fi
|
||||
|
||||
# Newer version of CMake needed to build sail-riscv model (at least 3.20)
|
||||
if (( UBUNTU_VERSION == 20 || DEBIAN_VERSION == 11 )); then
|
||||
STATUS="cmake"
|
||||
if [ ! -e "$RISCV"/bin/cmake ]; then
|
||||
section_header "Installing cmake"
|
||||
cd "$RISCV"
|
||||
wget -nv --retry-connrefused $retry_on_host_error --output-document=cmake.tar.gz https://github.com/Kitware/CMake/releases/download/v3.31.5/cmake-3.31.5-linux-x86_64.tar.gz
|
||||
tar xz --directory="$RISCV" --strip-components=1 -f cmake.tar.gz
|
||||
rm -f cmake.tar.gz
|
||||
echo -e "${SUCCESS_COLOR}CMake successfully installed/updated!${ENDC}"
|
||||
else
|
||||
echo -e "${SUCCESS_COLOR}CMake already installed.${ENDC}"
|
||||
fi
|
||||
fi
|
||||
|
||||
# RISC-V GNU Toolchain (https://github.com/riscv-collab/riscv-gnu-toolchain)
|
||||
# The RISC-V GNU Toolchain includes the GNU Compiler Collection (gcc), GNU Binutils, Newlib,
|
||||
# and the GNU Debugger Project (gdb). It is a collection of tools used to compile RISC-V programs.
|
||||
|
@ -297,7 +314,7 @@ if git_check "riscv-gnu-toolchain" "https://github.com/riscv/riscv-gnu-toolchain
|
|||
cd "$RISCV"/riscv-gnu-toolchain
|
||||
git reset --hard && git clean -f && git checkout master && git pull && git submodule update
|
||||
./configure --prefix="${RISCV}" --with-multilib-generator="rv32e-ilp32e--;rv32i-ilp32--;rv32im-ilp32--;rv32iac-ilp32--;rv32imac-ilp32--;rv32imafc-ilp32f--;rv32imafdc-ilp32d--;rv64i-lp64--;rv64ic-lp64--;rv64iac-lp64--;rv64imac-lp64--;rv64imafdc-lp64d--;rv64im-lp64--;"
|
||||
make -j "${NUM_THREADS}" 2>&1 | logger $STATUS; [ "${PIPESTATUS[0]}" == 0 ]
|
||||
make -j "${NUM_THREADS}" 2>&1 | logger; [ "${PIPESTATUS[0]}" == 0 ]
|
||||
if [ "$clean" ]; then
|
||||
cd "$RISCV"
|
||||
rm -rf riscv-gnu-toolchain
|
||||
|
@ -324,8 +341,8 @@ if git_check "elf2hex" "https://github.com/sifive/elf2hex.git" "$RISCV/bin/riscv
|
|||
git reset --hard && git clean -f && git checkout master && git pull
|
||||
autoreconf -i
|
||||
./configure --target=riscv64-unknown-elf --prefix="$RISCV"
|
||||
make 2>&1 | logger $STATUS; [ "${PIPESTATUS[0]}" == 0 ]
|
||||
make install 2>&1 | logger $STATUS; [ "${PIPESTATUS[0]}" == 0 ]
|
||||
make 2>&1 | logger; [ "${PIPESTATUS[0]}" == 0 ]
|
||||
make install 2>&1 | logger; [ "${PIPESTATUS[0]}" == 0 ]
|
||||
if [ "$clean" ]; then
|
||||
cd "$RISCV"
|
||||
rm -rf elf2hex
|
||||
|
@ -345,8 +362,8 @@ if git_check "qemu" "https://github.com/qemu/qemu" "$RISCV/include/qemu-plugin.h
|
|||
cd "$RISCV"/qemu
|
||||
git reset --hard && git clean -f && git checkout master && git pull
|
||||
./configure --target-list=riscv64-softmmu --prefix="$RISCV"
|
||||
make -j "${NUM_THREADS}" 2>&1 | logger $STATUS; [ "${PIPESTATUS[0]}" == 0 ]
|
||||
make install 2>&1 | logger $STATUS; [ "${PIPESTATUS[0]}" == 0 ]
|
||||
make -j "${NUM_THREADS}" 2>&1 | logger; [ "${PIPESTATUS[0]}" == 0 ]
|
||||
make install 2>&1 | logger; [ "${PIPESTATUS[0]}" == 0 ]
|
||||
if [ "$clean" ]; then
|
||||
cd "$RISCV"
|
||||
rm -rf qemu
|
||||
|
@ -368,8 +385,8 @@ if git_check "riscv-isa-sim" "https://github.com/riscv-software-src/riscv-isa-si
|
|||
mkdir -p build
|
||||
cd build
|
||||
../configure --prefix="$RISCV"
|
||||
make -j "${NUM_THREADS}" 2>&1 | logger $STATUS; [ "${PIPESTATUS[0]}" == 0 ]
|
||||
make install 2>&1 | logger $STATUS; [ "${PIPESTATUS[0]}" == 0 ]
|
||||
make -j "${NUM_THREADS}" 2>&1 | logger; [ "${PIPESTATUS[0]}" == 0 ]
|
||||
make install 2>&1 | logger; [ "${PIPESTATUS[0]}" == 0 ]
|
||||
if [ "$clean" ]; then
|
||||
cd "$RISCV"
|
||||
rm -rf riscv-isa-sim
|
||||
|
@ -393,8 +410,8 @@ if git_check "verilator" "https://github.com/verilator/verilator" "$RISCV/share/
|
|||
git reset --hard && git clean -f && git checkout master && git pull
|
||||
autoconf
|
||||
./configure --prefix="$RISCV"
|
||||
make -j "${NUM_THREADS}" 2>&1 | logger $STATUS; [ "${PIPESTATUS[0]}" == 0 ]
|
||||
make install 2>&1 | logger $STATUS; [ "${PIPESTATUS[0]}" == 0 ]
|
||||
make -j "${NUM_THREADS}" 2>&1 | logger; [ "${PIPESTATUS[0]}" == 0 ]
|
||||
make install 2>&1 | logger; [ "${PIPESTATUS[0]}" == 0 ]
|
||||
if [ "$clean" ]; then
|
||||
cd "$RISCV"
|
||||
rm -rf verilator
|
||||
|
@ -411,7 +428,7 @@ fi
|
|||
# The Sail Compiler is written in OCaml, which is an object-oriented extension of ML, which in turn
|
||||
# is a functional programming language suited to formal verification.
|
||||
section_header "Installing/Updating Sail Compiler"
|
||||
STATUS="Sail Compiler"
|
||||
STATUS="sail_compiler"
|
||||
if [ ! -e "$RISCV"/bin/sail ]; then
|
||||
cd "$RISCV"
|
||||
wget -nv --retry-connrefused $retry_on_host_error --output-document=sail.tar.gz https://github.com/rems-project/sail/releases/latest/download/sail.tar.gz
|
||||
|
@ -426,13 +443,13 @@ fi
|
|||
# The RISC-V Sail Model is the golden reference model for RISC-V. It is written in Sail (described above)
|
||||
section_header "Installing/Updating RISC-V Sail Model"
|
||||
STATUS="riscv-sail-model"
|
||||
if git_check "sail-riscv" "https://github.com/riscv/sail-riscv.git" "$RISCV/bin/riscv_sim_RV32"; then
|
||||
if git_check "sail-riscv" "https://github.com/riscv/sail-riscv.git" "$RISCV/bin/riscv_sim_rv32d"; then
|
||||
cd "$RISCV"/sail-riscv
|
||||
git reset --hard && git clean -f && git checkout master && git pull
|
||||
ARCH=RV64 make -j "${NUM_THREADS}" c_emulator/riscv_sim_RV64 2>&1 | logger $STATUS; [ "${PIPESTATUS[0]}" == 0 ]
|
||||
ARCH=RV32 make -j "${NUM_THREADS}" c_emulator/riscv_sim_RV32 2>&1 | logger $STATUS; [ "${PIPESTATUS[0]}" == 0 ]
|
||||
cp -f c_emulator/riscv_sim_RV64 "$RISCV"/bin/riscv_sim_RV64
|
||||
cp -f c_emulator/riscv_sim_RV32 "$RISCV"/bin/riscv_sim_RV32
|
||||
cmake -S . -B build -DCMAKE_BUILD_TYPE=RelWithDebInfo -GNinja 2>&1 | logger; [ "${PIPESTATUS[0]}" == 0 ]
|
||||
cmake --build build 2>&1 | logger; [ "${PIPESTATUS[0]}" == 0 ]
|
||||
cp -f build/c_emulator/riscv_sim_rv64d "$RISCV"/bin/riscv_sim_rv64d
|
||||
cp -f build/c_emulator/riscv_sim_rv32d "$RISCV"/bin/riscv_sim_rv32d
|
||||
if [ "$clean" ]; then
|
||||
cd "$RISCV"
|
||||
rm -rf sail-riscv
|
||||
|
@ -446,7 +463,7 @@ fi
|
|||
# OSU Skywater 130 cell library (https://foss-eda-tools.googlesource.com/skywater-pdk/libs/sky130_osu_sc_t12)
|
||||
# The OSU Skywater 130 cell library is a standard cell library that is used to synthesize Wally.
|
||||
section_header "Installing/Updating OSU Skywater 130 cell library"
|
||||
STATUS="OSU Skywater 130 cell library"
|
||||
STATUS="osu_skywater_130_cell_library"
|
||||
mkdir -p "$RISCV"/cad/lib
|
||||
cd "$RISCV"/cad/lib
|
||||
if git_check "sky130_osu_sc_t12" "https://foss-eda-tools.googlesource.com/skywater-pdk/libs/sky130_osu_sc_t12" "$RISCV/cad/lib/sky130_osu_sc_t12" "main"; then
|
||||
|
@ -471,11 +488,11 @@ if [ ! "$no_buidroot" ]; then
|
|||
fi
|
||||
cd "$dir"/../linux
|
||||
if [ ! -e "$RISCV"/buildroot ]; then
|
||||
make 2>&1 | logger $STATUS; [ "${PIPESTATUS[0]}" == 0 ]
|
||||
FORCE_UNSAFE_CONFIGURE=1 make 2>&1 | logger; [ "${PIPESTATUS[0]}" == 0 ] # FORCE_UNSAFE_CONFIGURE is needed to allow buildroot to compile when run as root
|
||||
echo -e "${SUCCESS_COLOR}Buildroot successfully installed and Linux testvectors created!${ENDC}"
|
||||
elif [ ! -e "$RISCV"/linux-testvectors ]; then
|
||||
echo -e "${OK_COLOR}Buildroot already exists, but Linux testvectors are missing. Generating them now.${ENDC}"
|
||||
make dumptvs 2>&1 | logger $STATUS; [ "${PIPESTATUS[0]}" == 0 ]
|
||||
make dumptvs 2>&1 | logger; [ "${PIPESTATUS[0]}" == 0 ]
|
||||
echo -e "${SUCCESS_COLOR}Linux testvectors successfully generated!${ENDC}"
|
||||
else
|
||||
echo -e "${OK_COLOR}Buildroot and Linux testvectors already exist.${ENDC}"
|
||||
|
@ -489,7 +506,7 @@ fi
|
|||
# The site-setup script is used to set up the environment for the RISC-V tools and EDA tools by setting
|
||||
# the PATH and other environment variables. It also sources the Python virtual environment.
|
||||
section_header "Downloading Site Setup Script"
|
||||
STATUS="site-setup scripts"
|
||||
STATUS="site-setup_scripts"
|
||||
cd "$RISCV"
|
||||
if [ ! -e "${RISCV}"/site-setup.sh ]; then
|
||||
wget -nv --retry-connrefused $retry_on_host_error https://raw.githubusercontent.com/openhwgroup/cvw/main/site-setup.sh
|
||||
|
|
22
bin/wsim
|
@ -16,7 +16,7 @@ import os
|
|||
import sys
|
||||
|
||||
# Global variable
|
||||
WALLY = os.environ.get('WALLY')
|
||||
WALLY = os.environ.get("WALLY")
|
||||
|
||||
def parseArgs():
|
||||
parser = argparse.ArgumentParser()
|
||||
|
@ -41,13 +41,13 @@ def validateArgs(args):
|
|||
if not args.testsuite and not args.elf:
|
||||
print("Error: Missing test suite or ELF file")
|
||||
sys.exit(1)
|
||||
if any([args.lockstep, args.lockstepverbose, args.fcov]) and not (args.testsuite.endswith('.elf') or args.elf) and args.testsuite != "buildroot":
|
||||
if any([args.lockstep, args.lockstepverbose, args.fcov]) and not (args.testsuite.endswith(".elf") or args.elf) and args.testsuite != "buildroot":
|
||||
print(f"Invalid Options. Cannot run a testsuite, {args.testsuite} with lockstep or fcov. Must run a single elf or buildroot.")
|
||||
sys.exit(1)
|
||||
elif any([args.gui, args.ccov, args.fcov, args.lockstep, args.lockstepverbose]) and args.sim not in ["questa", "vcs"]:
|
||||
print("Option only supported for Questa and VCS")
|
||||
sys.exit(1)
|
||||
elif (args.tb == "testbench_fp" and args.sim != "questa"):
|
||||
elif args.tb == "testbench_fp" and args.sim != "questa":
|
||||
print("Error: testbench_fp presently only supported by Questa, not VCS or Verilator, because of a touchy testbench")
|
||||
sys.exit(1)
|
||||
elif (args.config == "breker" and args.sim != "questa"):
|
||||
|
@ -61,11 +61,11 @@ def elfFileCheck(args):
|
|||
elif args.elf:
|
||||
print(f"ELF file not found: {args.elf}")
|
||||
sys.exit(1)
|
||||
elif args.testsuite.endswith('.elf'): # No --elf argument; check if testsuite has a .elf extension and use that instead
|
||||
elif args.testsuite.endswith(".elf"): # No --elf argument; check if testsuite has a .elf extension and use that instead
|
||||
if os.path.isfile(args.testsuite):
|
||||
ElfFile = os.path.abspath(args.testsuite)
|
||||
# extract the elf name from the path to be the test suite
|
||||
fields = args.testsuite.rsplit('/', 3)
|
||||
fields = args.testsuite.rsplit("/", 3)
|
||||
# if the name is just ref.elf in a deep path (riscv-arch-test/wally-riscv-arch-test), then use the directory name as the test suite to make it unique; otherwise work directory will have duplicates.
|
||||
if "breker" in args.testsuite:
|
||||
args.testsuite = fields[-1]
|
||||
|
@ -74,8 +74,8 @@ def elfFileCheck(args):
|
|||
args.testsuite = f"{fields[1]}_{fields[3]}"
|
||||
else:
|
||||
args.testsuite = f"{fields[2]}_{fields[3]}"
|
||||
elif '/' in args.testsuite:
|
||||
args.testsuite=args.testsuite.rsplit('/', 1)[1] # strip off path if present
|
||||
elif "/" in args.testsuite:
|
||||
args.testsuite = args.testsuite.rsplit("/", 1)[1] # strip off path if present
|
||||
else:
|
||||
print(f"ELF file not found: {args.testsuite}")
|
||||
sys.exit(1)
|
||||
|
@ -116,9 +116,9 @@ def prepSim(args, ElfFile):
|
|||
defineList.append("+define+USE_TREK_DV")
|
||||
argsList.append(f"+TREK_TBX_FILE={ElfFileNoExtension}.tbx")
|
||||
# Combine into a single string
|
||||
args.args += " ".join(argsList)
|
||||
args.params += " ".join(paramsList)
|
||||
args.define += " ".join(defineList)
|
||||
args.args += " " + " ".join(argsList)
|
||||
args.params += " " + " ".join(paramsList)
|
||||
args.define += " " + " ".join(defineList)
|
||||
flags = " ".join(flagsList)
|
||||
return flags, prefix
|
||||
|
||||
|
@ -154,7 +154,7 @@ def runQuesta(args, flags, prefix):
|
|||
args.params = fr'--params \"{args.params}\"'
|
||||
if args.define:
|
||||
args.define = fr'--define \"{args.define}\"'
|
||||
# Questa cannot accept more than 9 arguments. fcov implies lockstep
|
||||
# fcov implies lockstep
|
||||
cmd = f"do wally.do {args.config} {args.testsuite} {args.tb} {args.args} {args.params} {args.define} {flags}"
|
||||
cmd = f'cd $WALLY/sim/questa; {prefix} vsim {"-c" if not args.gui else ""} -do "{cmd}"'
|
||||
print(f"Running Questa with command: {cmd}")
|
||||
|
|
|
@ -212,7 +212,7 @@ localparam PLIC_SPI_ID = 32'd6;
|
|||
localparam PLIC_SDC_ID = 32'd9;
|
||||
|
||||
// Branch prediction
|
||||
localparam logic BPRED_SUPPORTED = 1;
|
||||
localparam logic BPRED_SUPPORTED = 0;
|
||||
localparam BPRED_TYPE = `BP_GSHARE; // BP_GSHARE_BASIC, BP_GLOBAL, BP_GLOBAL_BASIC, BP_TWOBIT
|
||||
localparam BPRED_SIZE = 32'd10;
|
||||
localparam BPRED_NUM_LHR = 32'd6;
|
||||
|
|
|
@ -58,3 +58,8 @@
|
|||
`include "EndianS_coverage.svh"
|
||||
`include "ExceptionsM_coverage.svh"
|
||||
`include "ExceptionsZc_coverage.svh"
|
||||
`include "ZicntrU_coverage.svh"
|
||||
`include "ZicntrS_coverage.svh"
|
||||
`include "ZicntrM_coverage.svh"
|
||||
`include "ZfaZfhD_coverage.svh"
|
||||
`include "ZfhminD_coverage.svh"
|
||||
|
|
|
@ -57,6 +57,12 @@
|
|||
`include "EndianS_coverage.svh"
|
||||
`include "ExceptionsM_coverage.svh"
|
||||
`include "ExceptionsZc_coverage.svh"
|
||||
`include "ZicntrU_coverage.svh"
|
||||
`include "ZicntrS_coverage.svh"
|
||||
`include "ZicntrM_coverage.svh"
|
||||
`include "ZfaZfhD_coverage.svh"
|
||||
`include "ZfhminD_coverage.svh"
|
||||
|
||||
// `include "RV64VM_PMP_coverage.svh"
|
||||
// `include "RV64CBO_VM_coverage.svh"
|
||||
// `include "RV64CBO_PMP_coverage.svh"
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
import shutil, os
|
||||
import shutil
|
||||
import os
|
||||
|
||||
# if WALLY is defined, then get it
|
||||
WALLY_HOME = os.getenv("WALLY")
|
||||
|
@ -10,4 +11,4 @@ BUILDROOT_SRC = "linux/buildroot-config-src/wally"
|
|||
TESTVECTOR_SRC = "linux/testvector-generation"
|
||||
|
||||
shutil.copytree(os.path.join(WALLY_HOME, BUILDROOT_SRC), "./buildroot-config-src")
|
||||
shutil.copytree(os.path.join(WALLY_HOME, TESTVECTOR_SRC), "./testvector-generation")
|
||||
shutil.copytree(os.path.join(WALLY_HOME, TESTVECTOR_SRC), "./testvector-generation")
|
||||
|
|
6
examples/exercises/17p1/17p1.c
Normal file
|
@ -0,0 +1,6 @@
|
|||
#include <string.h>
|
||||
|
||||
int main(void) {
|
||||
char str[] = "Hello Wally!";
|
||||
return strlen(str);
|
||||
}
|
11
examples/exercises/17p1/Makefile
Normal file
|
@ -0,0 +1,11 @@
|
|||
TARGET = 17p1
|
||||
|
||||
$(TARGET).objdump: $(TARGET).elf
|
||||
riscv64-unknown-elf-objdump -D $(TARGET).elf > $(TARGET).objdump
|
||||
|
||||
$(TARGET).elf: $(TARGET).c Makefile
|
||||
riscv64-unknown-elf-gcc -g -march=rv64gc_zbb -mabi=lp64 -mcmodel=medany \
|
||||
-nostartfiles -T../../link/link.ld $(TARGET).c -o $(TARGET).elf
|
||||
|
||||
clean:
|
||||
rm -f $(TARGET).elf $(TARGET).objdump
|
10
examples/exercises/3p1/3p1.S
Normal file
|
@ -0,0 +1,10 @@
|
|||
.section .text.init
|
||||
.globl rvtest_entry_point
|
||||
|
||||
rvtest_entry_point:
|
||||
li t0, 0x42
|
||||
li t1, 0xED
|
||||
add t2, t0, t1
|
||||
|
||||
self_loop:
|
||||
j self_loop
|
13
examples/exercises/3p1/Makefile
Normal file
|
@ -0,0 +1,13 @@
|
|||
TARGET = 3p1
|
||||
|
||||
$(TARGET).objdump: $(TARGET).elf
|
||||
riscv64-unknown-elf-objdump -D $(TARGET).elf > $(TARGET).objdump
|
||||
|
||||
$(TARGET).elf: $(TARGET).S Makefile
|
||||
riscv64-unknown-elf-gcc -g -march=rv64gc -mabi=lp64 -mcmodel=medany \
|
||||
-nostartfiles -T../../link/link.ld $(TARGET).S -o $(TARGET).elf
|
||||
|
||||
clean:
|
||||
rm -f $(TARGET).elf $(TARGET).objdump
|
||||
|
||||
|
6
examples/exercises/3p13/3p13.c
Normal file
|
@ -0,0 +1,6 @@
|
|||
#include <string.h>
|
||||
|
||||
int main(void) {
|
||||
char str[] = "Hello Wally!";
|
||||
return strlen(str);
|
||||
}
|
11
examples/exercises/3p13/Makefile
Normal file
|
@ -0,0 +1,11 @@
|
|||
TARGET = 3p13
|
||||
|
||||
$(TARGET).objdump: $(TARGET).elf
|
||||
riscv64-unknown-elf-objdump -D $(TARGET).elf > $(TARGET).objdump
|
||||
|
||||
$(TARGET).elf: $(TARGET).c Makefile
|
||||
riscv64-unknown-elf-gcc -g -march=rv32gc -mabi=ilp32 -mcmodel=medany \
|
||||
-nostartfiles -T../../link/link.ld $(TARGET).c -o $(TARGET).elf
|
||||
|
||||
clean:
|
||||
rm -f $(TARGET).elf $(TARGET).objdump
|
19
examples/exercises/3p15/Makefile
Normal file
|
@ -0,0 +1,19 @@
|
|||
TARGET = sumtest
|
||||
|
||||
$(TARGET).objdump: $(TARGET)
|
||||
riscv64-unknown-elf-objdump -D $(TARGET) > $(TARGET).objdump
|
||||
|
||||
$(TARGET): $(TARGET).S sum.S Makefile
|
||||
riscv64-unknown-elf-gcc -g -o $(TARGET) -march=rv64gc -mabi=lp64 -mcmodel=medany \
|
||||
-nostartfiles -T../../link/link.ld $(TARGET).S sum.S
|
||||
|
||||
sim:
|
||||
riscv_sim_RV64 -T $(TARGET).signature.output --signature-granularity 8 $(TARGET)
|
||||
diff --ignore-case $(TARGET).signature.output $(TARGET).reference_output || exit
|
||||
echo "Signature matches! Success!"
|
||||
|
||||
clean:
|
||||
rm -f $(TARGET) $(TARGET).objdump $(TARGET).signature.output
|
||||
|
||||
|
||||
|
32
examples/exercises/3p15/sum.S
Normal file
|
@ -0,0 +1,32 @@
|
|||
// sum.S
|
||||
// David_Harris@hmc.edu 24 December 2021
|
||||
// Add up numbers from 1 to N.
|
||||
|
||||
// result in s0, i in s1, N in a0, return answer in a0
|
||||
//
|
||||
// long sum(long N) {
|
||||
// long result, i;
|
||||
// result = 0;
|
||||
// for (i=1; i<=N; i++) result = result + i;
|
||||
// return result;
|
||||
// }
|
||||
|
||||
.global sum
|
||||
sum:
|
||||
addi sp, sp, -16 # make room to save s0 and s1 on the stack
|
||||
sd s0, 0(sp)
|
||||
sd s1, 8(sp)
|
||||
|
||||
li s0, 0 # result = 0
|
||||
li s1, 1 # i = 1
|
||||
for: bgt s1, a0, done # exit loop if i > n
|
||||
add s0, s0, s1 # result = result + i
|
||||
addi s1, s1, 1 # i++
|
||||
j for # repeat
|
||||
|
||||
done:
|
||||
mv a0, s0 # put result in a0 to return
|
||||
ld s0, 0(sp) # restore s0 and s1 from stack
|
||||
ld s1, 8(sp)
|
||||
addi sp, sp, 16
|
||||
ret # return from function
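# Worked example (illustrative, not part of the original source): with N = 6, the loop
# computes 1+2+3+4+5+6 = 21 = 0x15, which is the first value recorded in
# sumtest.reference_output.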
|
43
examples/exercises/3p15/sumtest.S
Normal file
|
@ -0,0 +1,43 @@
|
|||
// sumtest.S
|
||||
// David_Harris@hmc.edu 24 December 2021
|
||||
|
||||
.global rvtest_entry_point
|
||||
rvtest_entry_point:
|
||||
la sp, topofstack # Initialize stack pointer
|
||||
la t0, N # get address of N in data
|
||||
ld a0, 0(t0) # load N
|
||||
csrr s8, instret # count instructions before call
|
||||
jal sum # call sum(N)
|
||||
csrr s9, instret # count instructions after call
|
||||
sub s9, s9, s8 # length of call
|
||||
la t0, begin_signature # address of signature
|
||||
sd a0, 0(t0) # store sum(N) in signature
|
||||
sd s9, 8(t0) # record performance
|
||||
|
||||
write_tohost:
|
||||
la t1, tohost
|
||||
li t0, 1 # 1 for success, 3 for failure
|
||||
sd t0, 0(t1) # send success code
|
||||
|
||||
self_loop:
|
||||
j self_loop # wait
|
||||
|
||||
.section .tohost
|
||||
tohost: # write to HTIF
|
||||
.dword 0
|
||||
fromhost:
|
||||
.dword 0
|
||||
|
||||
.data
|
||||
N:
|
||||
.dword 6
|
||||
|
||||
.EQU XLEN,64
|
||||
begin_signature:
|
||||
.fill 2*(XLEN/32),4,0xdeadbeef #
|
||||
end_signature:
|
||||
|
||||
# Initialize stack with room for 512 bytes
|
||||
.bss
|
||||
.space 512
|
||||
topofstack:
|
2
examples/exercises/3p15/sumtest.reference_output
Normal file
|
@ -0,0 +1,2 @@
|
|||
0000000000000015
|
||||
0000000000000025
|
19
examples/exercises/3p3/Makefile
Normal file
|
@ -0,0 +1,19 @@
|
|||
TARGET = sumtest
|
||||
|
||||
$(TARGET).objdump: $(TARGET)
|
||||
riscv64-unknown-elf-objdump -D $(TARGET) > $(TARGET).objdump
|
||||
|
||||
$(TARGET): $(TARGET).S sum.S Makefile
|
||||
riscv64-unknown-elf-gcc -g -o $(TARGET) -march=rv64gc -mabi=lp64 -mcmodel=medany \
|
||||
-nostartfiles -T../../link/link.ld $(TARGET).S sum.S
|
||||
|
||||
sim:
|
||||
spike +signature=$(TARGET).signature.output +signature-granularity=8 $(TARGET)
|
||||
diff --ignore-case $(TARGET).signature.output $(TARGET).reference_output || exit
|
||||
echo "Signature matches! Success!"
|
||||
|
||||
clean:
|
||||
rm -f $(TARGET) $(TARGET).objdump $(TARGET).signature.output
|
||||
|
||||
|
||||
|
32
examples/exercises/3p3/sum.S
Normal file
|
@ -0,0 +1,32 @@
|
|||
// sum.S
|
||||
// David_Harris@hmc.edu 24 December 2021
|
||||
// Add up numbers from 1 to N.
|
||||
|
||||
// result in s0, i in s1, N in a0, return answer in a0
|
||||
//
|
||||
// long sum(long N) {
|
||||
// long result, i;
|
||||
// result = 0;
|
||||
// for (i=1; i<=N; i++) result = result + i;
|
||||
// return result;
|
||||
// }
|
||||
|
||||
.global sum
|
||||
sum:
|
||||
addi sp, sp, -16 # make room to save s0 and s1 on the stack
|
||||
sd s0, 0(sp)
|
||||
sd s1, 8(sp)
|
||||
|
||||
li s0, 0 # result = 0
|
||||
li s1, 1 # i = 1
|
||||
for: bgt s1, a0, done # exit loop if i > n
|
||||
add s0, s0, s1 # result = result + i
|
||||
addi s1, s1, 1 # i++
|
||||
j for # repeat
|
||||
|
||||
done:
|
||||
mv a0, s0 # put result in a0 to return
|
||||
ld s0, 0(sp) # restore s0 and s1 from stack
|
||||
ld s1, 8(sp)
|
||||
addi sp, sp, 16
|
||||
ret # return from function
|
43
examples/exercises/3p3/sumtest.S
Normal file
|
@ -0,0 +1,43 @@
|
|||
// sumtest.S
|
||||
// David_Harris@hmc.edu 24 December 2021
|
||||
|
||||
.global rvtest_entry_point
|
||||
rvtest_entry_point:
|
||||
la sp, topofstack # Initialize stack pointer
|
||||
la t0, N # get address of N in data
|
||||
ld a0, 0(t0) # load N
|
||||
csrr s8, instret # count instructions before call
|
||||
jal sum # call sum(N)
|
||||
csrr s9, instret # count instructions after call
|
||||
sub s9, s9, s8 # length of call
|
||||
la t0, begin_signature # address of signature
|
||||
sd a0, 0(t0) # store sum(N) in signature
|
||||
sd s9, 8(t0) # record performance
|
||||
|
||||
write_tohost:
|
||||
la t1, tohost
|
||||
li t0, 1 # 1 for success, 3 for failure
|
||||
sd t0, 0(t1) # send success code
|
||||
|
||||
self_loop:
|
||||
j self_loop # wait
|
||||
|
||||
.section .tohost
|
||||
tohost: # write to HTIF
|
||||
.dword 0
|
||||
fromhost:
|
||||
.dword 0
|
||||
|
||||
.data
|
||||
N:
|
||||
.dword 6
|
||||
|
||||
.EQU XLEN,64
|
||||
begin_signature:
|
||||
.fill 2*(XLEN/32),4,0xdeadbeef #
|
||||
end_signature:
|
||||
|
||||
# Initialize stack with room for 512 bytes
|
||||
.bss
|
||||
.space 512
|
||||
topofstack:
|
2
examples/exercises/3p3/sumtest.reference_output
Normal file
|
@ -0,0 +1,2 @@
|
|||
0000000000000015
|
||||
0000000000000025
|
32
examples/exercises/3p5/Makefile
Normal file
|
@ -0,0 +1,32 @@
|
|||
TARGET = matvecmul
|
||||
|
||||
$(TARGET).objdump: $(TARGET)
|
||||
riscv64-unknown-elf-objdump -S -D $(TARGET) > $(TARGET).objdump
|
||||
|
||||
$(TARGET): $(TARGET).c Makefile
|
||||
riscv64-unknown-elf-gcc -o $(TARGET) -gdwarf-2 -O\
|
||||
-march=rv64gc -mabi=lp64d -mcmodel=medany \
|
||||
-nostdlib -static -lm -fno-tree-loop-distribute-patterns \
|
||||
-T../../C/common/test.ld -I../../C/common \
|
||||
$(TARGET).c ../../C/common/crt.S ../../C/common/syscalls.c
|
||||
# Compiler flags:
|
||||
# -o $(TARGET) defines the name of the output file
|
||||
# -g generates debugging symbols for gdb
|
||||
# -O turns on basic optimization; -O3 turns on heavy optimization; omit for no optimization
|
||||
# -march=rv64gc -mabi=lp64d -mcmodel=medany generates code for RV64GC with doubles and long/ptrs = 64 bits
|
||||
# -static forces static linking (no dynamic shared libraries on bare metal)
|
||||
# -lm links the math library if necessary (when #include math.h)
|
||||
# -nostdlib avoids inserting standard startup files and default libraries
|
||||
# because we are using crt.s on bare metal
|
||||
# -fno-tree-loop-distribute-patterns turns off replacing loops with memcpy/memset calls from the std library
|
||||
# -T specifies the linker file
|
||||
# -I specifies the include path (e.g. for util.h)
|
||||
# The last line defines the C files to compile.
|
||||
# crt.S is needed as our startup file to initialize the processor
|
||||
# syscalls.c implements printf through the HTIF for Spike
|
||||
# other flags from riscv-tests makefiles that don't seem to be important
|
||||
# -ffast-math -DPREALLOCATE=1 -std=gnu99 \
|
||||
# -fno-common -fno-builtin-printf -nostartfiles -lgcc \
|
||||
|
||||
clean:
|
||||
rm -f $(TARGET) $(TARGET).objdump
|
22
examples/exercises/3p5/matvecmul.c
Normal file
|
@ -0,0 +1,22 @@
|
|||
#include <stdio.h> // supports printf
|
||||
#include "util.h" // supports verify
|
||||
|
||||
// Matrix-vector multiplication y = Ax.
|
||||
// A is an m x n matrix (m rows, n columns).
|
||||
void matvecmul(int A[], int x[], int y[], int m, int n) {
|
||||
int i, j, sum;
|
||||
for (i=0; i<m; i = i + 1) {
|
||||
sum = 0;
|
||||
for (j=0; j<n; j = j + 1)
|
||||
sum = sum + A[i*n+j] * x[j];
|
||||
y[i] = sum;
|
||||
}
|
||||
}
|
||||
|
||||
void main(void) {
|
||||
int A[6] = {1, 2, 3, 4, 5, 6};
|
||||
int x[3] = {7, 8, 9};
|
||||
int y[2];
|
||||
|
||||
matvecmul(A, x, y, 2, 3);
|
||||
}
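// Expected result (illustrative check, not part of the original source): y = A*x with
// A = [1 2 3; 4 5 6] and x = [7 8 9] gives y[0] = 1*7 + 2*8 + 3*9 = 50 and
// y[1] = 4*7 + 5*8 + 6*9 = 122.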
|
32
examples/exercises/3p7/Makefile
Normal file
|
@ -0,0 +1,32 @@
|
|||
TARGET = fir
|
||||
|
||||
$(TARGET).objdump: $(TARGET)
|
||||
riscv64-unknown-elf-objdump -S -D $(TARGET) > $(TARGET).objdump
|
||||
|
||||
$(TARGET): $(TARGET).c Makefile
|
||||
riscv64-unknown-elf-gcc -o $(TARGET) -gdwarf-2 -O2\
|
||||
-march=rv64gc -mabi=lp64d -mcmodel=medany \
|
||||
-nostdlib -static -lm -fno-tree-loop-distribute-patterns \
|
||||
-T../../C/common/test.ld -I../../C/common \
|
||||
$(TARGET).c ../../C/common/crt.S ../../C/common/syscalls.c
|
||||
# Compiler flags:
|
||||
# -o $(TARGET) defines the name of the output file
|
||||
# -g generates debugging symbols for gdb
|
||||
# -O turns on basic optimization; -O3 turns on heavy optimization; omit for no optimization
|
||||
# -march=rv64gc -mabi=lp64d -mcmodel=medany generates code for RV64GC with doubles and long/ptrs = 64 bits
|
||||
# -static forces static linking (no dynamic shared libraries on bare metal)
|
||||
# -lm links the math library if necessary (when #include math.h)
|
||||
# -nostdlib avoids inserting standard startup files and default libraries
|
||||
# because we are using crt.s on bare metal
|
||||
# -fno-tree-loop-distribute-patterns turns off replacing loops with memcpy/memset calls from the std library
|
||||
# -T specifies the linker file
|
||||
# -I specifies the include path (e.g. for util.h)
|
||||
# The last line defines the C files to compile.
|
||||
# crt.S is needed as our startup file to initialize the processor
|
||||
# syscalls.c implements printf through the HTIF for Spike
|
||||
# other flags from riscv-tests makefiles that don't seem to be important
|
||||
# -ffast-math -DPREALLOCATE=1 -std=gnu99 \
|
||||
# -fno-common -fno-builtin-printf -nostartfiles -lgcc \
|
||||
|
||||
clean:
|
||||
rm -f $(TARGET) $(TARGET).objdump
|
83
examples/exercises/3p7/fir.c
Normal file
|
@ -0,0 +1,83 @@
|
|||
#include <stdio.h> // supports printf
|
||||
#include "util.h" // supports verify
|
||||
|
||||
// Add two Q1.31 fixed point numbers
|
||||
int add_q31(int a, int b) {
|
||||
return a + b;
|
||||
}
|
||||
|
||||
// Multiply two Q1.31 fixed point numbers
|
||||
int mul_q31(int a, int b) {
|
||||
long res = (long)a * (long)b;
|
||||
int result = res >> 31; // shift right to get the 32-bit result; this is equivalent to shifting left by 1 and discarding the bottom 32 bits
|
||||
//printf("mul_q31: a = %x, b = %x, res = %lx, result = %x\n", a, b, res, result);
|
||||
return result;
|
||||
}
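// Worked example (illustrative, not part of the original source): in Q1.31, 0.5 is
// encoded as 0x40000000. Multiplying 0.5 * 0.5 gives
// (long)0x40000000 * 0x40000000 = 0x1000000000000000; shifting right by 31 leaves
// 0x20000000, which is 0.25 in Q1.31, as expected.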
|
||||
|
||||
|
||||
// low pass filter x with coefficients c, result in y
|
||||
// n is the length of x, m is the length of c
|
||||
// y[j] = c[0]*x[j+m-1] + c[1]*x[j+m-2] + ... + c[m-1]*x[j]
|
||||
// inputs in Q1.31 format
|
||||
void fir(int x[], int c[], int y[], int n, int m) {
|
||||
int i, j;
|
||||
for (j=0; j<n-m+1; j++) {
|
||||
y[j] = 0;
|
||||
for (i=0; i<m; i++)
|
||||
y[j] = add_q31(y[j], mul_q31(c[i], x[j-i+(m-1)]));
|
||||
}
|
||||
}
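// For the m = 4 low-pass filter used below, each output unrolls to (illustrative):
//   y[j] = c[0]*x[j+3] + c[1]*x[j+2] + c[2]*x[j+1] + c[3]*x[j]
// evaluated with the Q1.31 add_q31/mul_q31 helpers above.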
|
||||
|
||||
int main(void) {
|
||||
int32_t sin_table[20] = { // in Q1.31 format
|
||||
0x00000000, // sin(0*2pi/10)
|
||||
0x4B3C8C12, // sin(1*2pi/10)
|
||||
0x79BC384D, // sin(2*2pi/10)
|
||||
0x79BC384D, // sin(3*2pi/10)
|
||||
0x4B3C8C12, // sin(4*2pi/10)
|
||||
0x00000000, // sin(5*2pi/10)
|
||||
0xB4C373EE, // sin(6*2pi/10)
|
||||
0x8643C7B3, // sin(7*2pi/10)
|
||||
0x8643C7B3, // sin(8*2pi/10)
|
||||
0xB4C373EE, // sin(9*2pi/10)
|
||||
0x00000000, // sin(10*2pi/10)
|
||||
0x4B3C8C12, // sin(11*2pi/10)
|
||||
0x79BC384D, // sin(12*2pi/10)
|
||||
0x79BC384D, // sin(13*2pi/10)
|
||||
0x4B3C8C12, // sin(14*2pi/10)
|
||||
0x00000000, // sin(15*2pi/10)
|
||||
0xB4C373EE, // sin(16*2pi/10)
|
||||
0x8643C7B3, // sin(17*2pi/10)
|
||||
0x8643C7B3, // sin(18*2pi/10)
|
||||
0xB4C373EE // sin(19*2pi/10)
|
||||
};
|
||||
int lowpass[4] = {0x20000001, 0x20000002, 0x20000003, 0x20000004}; // approximately 1/4 in Q1.31 format
|
||||
int y[17];
|
||||
int expected[17] = { // in Q1.31 format
|
||||
0x4fad3f2f,
|
||||
0x627c6236,
|
||||
0x4fad3f32,
|
||||
0x1e6f0e17,
|
||||
0xe190f1eb,
|
||||
0xb052c0ce,
|
||||
0x9d839dc6,
|
||||
0xb052c0cb,
|
||||
0xe190f1e6,
|
||||
0x1e6f0e12,
|
||||
0x4fad3f2f,
|
||||
0x627c6236,
|
||||
0x4fad3f32,
|
||||
0x1e6f0e17,
|
||||
0xe190f1eb,
|
||||
0xb052c0ce,
|
||||
0x9d839dc6
|
||||
};
|
||||
|
||||
setStats(1); // record initial mcycle and minstret
|
||||
fir(sin_table, lowpass, y, 20, 4);
|
||||
setStats(0); // record elapsed mcycle and minstret
|
||||
for (int i=0; i<17; i++) {
|
||||
printf("y[%d] = %x\n", i, y[i]);
|
||||
}
|
||||
return verify(16, y, expected); // check the 1 element of s matches expected. 0 means success
|
||||
}
|
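
The Q1.31 helpers above map the range [-1, 1) onto 32-bit integers. As a quick host-side sanity check before running on Wally, a standalone sketch like the following can be compiled natively (the file name is hypothetical and it is not part of this commit; it uses `long long` so the 64-bit product is also correct on 32-bit hosts):

```c
// q31_check.c -- hypothetical host-side check of the Q1.31 helpers (not part of this commit)
// build natively with: gcc -O2 q31_check.c -o q31_check
#include <stdio.h>

int add_q31(int a, int b) { return a + b; }
int mul_q31(int a, int b) { return (int)(((long long)a * b) >> 31); }

int main(void) {
  int half    = 0x40000000;                // 0.5 in Q1.31
  int quarter = mul_q31(half, half);       // expect 0.25 = 0x20000000
  int sum     = add_q31(quarter, quarter); // expect 0.5 = 0x40000000
  printf("0.5*0.5   = %08x (expect 20000000)\n", quarter);
  printf("0.25+0.25 = %08x (expect 40000000)\n", sum);
  return !(quarter == 0x20000000 && sum == 0x40000000); // 0 means success
}
```
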
32
examples/exercises/3p9/Makefile
Normal file

@ -0,0 +1,32 @@
TARGET = inline

$(TARGET).objdump: $(TARGET)
riscv64-unknown-elf-objdump -S -D $(TARGET) > $(TARGET).objdump

$(TARGET): $(TARGET).c Makefile
riscv64-unknown-elf-gcc -o $(TARGET) -gdwarf-2 -O \
-march=rv64gc -mabi=lp64d -mcmodel=medany \
-nostdlib -static -lm -fno-tree-loop-distribute-patterns \
-T../../C/common/test.ld -I../../C/common \
$(TARGET).c ../../C/common/crt.S ../../C/common/syscalls.c
# Compiler flags:
# -o $(TARGET) defines the name of the output file
# -g generates debugging symbols for gdb
# -O turns on basic optimization; -O3 turns on heavy optimization; omit for no optimization
# -march=rv64gc -mabi=lp64d -mcmodel=medany generates code for RV64GC with doubles and long/ptrs = 64 bits
# -static forces static linking (no dynamic shared libraries on bare metal)
# -lm links the math library if necessary (when #include math.h)
# -nostdlib avoids inserting standard startup files and default libraries
#   because we are using crt.S on bare metal
# -fno-tree-loop-distribute-patterns turns off replacing loops with memcpy/memset calls from the standard library
# -T specifies the linker file
# -I specifies the include path (e.g. for util.h)
# The last line defines the C files to compile.
# crt.S is needed as our startup file to initialize the processor
# syscalls.c implements printf through the HTIF for Spike
# other flags from riscv-tests makefiles that don't seem to be important:
#   -ffast-math -DPREALLOCATE=1 -std=gnu99 \
#   -fno-common -fno-builtin-printf -nostartfiles -lgcc \

clean:
rm -f $(TARGET) $(TARGET).objdump
11
examples/exercises/3p9/inline.c
Normal file

@ -0,0 +1,11 @@
#include <stdio.h> // supports printf
int main(void) {
  int a = 3;
  int b = 4;
  int c;
  // compute c = a + 2*b using inline assembly
  asm volatile("slli %0, %1, 1" : "=r" (c) : "r" (b));           // c = b << 1
  asm volatile("add %0, %1, %2" : "=r" (c) : "r" (a), "r" (c));  // c = a + c

  printf("c = %d\n", c);
}
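
The two asm statements above can also be folded into one; shown below purely as a sketch of an alternative (a hypothetical variant, not part of this commit), using the same riscv64-unknown-elf toolchain as the Makefile. The early-clobber constraint "=&r" matters because the output register is written before the last input is read.

```c
// inline2.c -- hypothetical single-asm-statement variant of the exercise above
#include <stdio.h>

int main(void) {
  int a = 3, b = 4, c;
  asm volatile("slli %0, %2, 1\n\t"   // c = b << 1
               "add  %0, %1, %0"      // c = a + c
               : "=&r" (c)
               : "r" (a), "r" (b));
  printf("c = %d\n", c);              // expect 11
}
```
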
16
examples/exercises/fma16/Makefile
Normal file

@ -0,0 +1,16 @@


CC = gcc
CFLAGS = -O3 -Wno-format-overflow
IFLAGS = -I$(WALLY)/addins/berkeley-softfloat-3/source/include/
LIBS = $(WALLY)/addins/berkeley-softfloat-3/build/Linux-x86_64-GCC/softfloat.a -lm -lquadmath
SRCS = $(wildcard *.c)
PROGS = $(patsubst %.c,%,$(SRCS))

all: $(PROGS)

%: %.c
$(CC) $(CFLAGS) -DSOFTFLOAT_FAST_INT64 $(IFLAGS) $(LFLAGS) -o $@ $< $(LIBS)

clean:
rm -f $(PROGS)
23
examples/exercises/fma16/fma.do
Normal file

@ -0,0 +1,23 @@
# fma.do
#
# run with vsim -do "do fma.do"
# add -c before -do for batch simulation

onbreak {resume}

# create library
vlib worklib

vlog -lint -sv -work worklib fma16.sv testbench.sv
vopt +acc worklib.testbench_fma16 -work worklib -o testbenchopt
vsim -lib worklib testbenchopt

add wave sim:/testbench_fma16/clk
add wave sim:/testbench_fma16/reset
add wave sim:/testbench_fma16/x
add wave sim:/testbench_fma16/y
add wave sim:/testbench_fma16/z
add wave sim:/testbench_fma16/result
add wave sim:/testbench_fma16/rexpected

run -all
147
examples/exercises/fma16/fma16_testgen.c
Normal file
147
examples/exercises/fma16/fma16_testgen.c
Normal file
|
@ -0,0 +1,147 @@
|
|||
// fma16_testgen.c
|
||||
// David_Harris 8 February 2025
|
||||
// Generate tests for 16-bit FMA
|
||||
// SPDX-License-Identifier: Apache-2.0 WITH SHL-2.1
|
||||
|
||||
#include <stdio.h>
|
||||
#include <stdint.h>
|
||||
#include <stdlib.h>
|
||||
#include "softfloat.h"
|
||||
#include "softfloat_types.h"
|
||||
|
||||
typedef union sp {
|
||||
float32_t v;
|
||||
float f;
|
||||
} sp;
|
||||
|
||||
// lists of tests, terminated with 0x8000
|
||||
uint16_t easyExponents[] = {15, 0x8000};
|
||||
uint16_t easyFracts[] = {0, 0x200, 0x8000}; // 1.0 and 1.1
|
||||
|
||||
void softfloatInit(void) {
|
||||
softfloat_roundingMode = softfloat_round_minMag;
|
||||
softfloat_exceptionFlags = 0;
|
||||
softfloat_detectTininess = softfloat_tininess_beforeRounding;
|
||||
}
|
||||
|
||||
float convFloat(float16_t f16) {
|
||||
float32_t f32;
|
||||
float res;
|
||||
sp r;
|
||||
|
||||
// convert half to float for printing
|
||||
f32 = f16_to_f32(f16);
|
||||
r.v = f32;
|
||||
res = r.f;
|
||||
return res;
|
||||
}
|
||||
|
||||
void genCase(FILE *fptr, float16_t x, float16_t y, float16_t z, int mul, int add, int negp, int negz, int roundingMode, int zeroAllowed, int infAllowed, int nanAllowed) {
|
||||
float16_t result;
|
||||
int op, flagVals;
|
||||
char calc[80], flags[80];
|
||||
float32_t x32, y32, z32, r32;
|
||||
float xf, yf, zf, rf;
|
||||
float16_t smallest;
|
||||
|
||||
if (!mul) y.v = 0x3C00; // force y to 1 to avoid multiply
|
||||
if (!add) z.v = 0x0000; // force z to 0 to avoid add
|
||||
if (negp) x.v ^= 0x8000; // flip sign of x to negate p
|
||||
if (negz) z.v ^= 0x8000; // flip sign of z to negate z
|
||||
op = roundingMode << 4 | mul<<3 | add<<2 | negp<<1 | negz;
|
||||
// printf("op = %02x rm %d mul %d add %d negp %d negz %d\n", op, roundingMode, mul, add, negp, negz);
|
||||
softfloat_exceptionFlags = 0; // clear exceptions
|
||||
result = f16_mulAdd(x, y, z); // call SoftFloat to compute expected result
|
||||
|
||||
// Extract expected flags from SoftFloat
|
||||
sprintf(flags, "NV: %d OF: %d UF: %d NX: %d",
|
||||
(softfloat_exceptionFlags >> 4) % 2,
|
||||
(softfloat_exceptionFlags >> 2) % 2,
|
||||
(softfloat_exceptionFlags >> 1) % 2,
|
||||
(softfloat_exceptionFlags) % 2);
|
||||
// pack these four flags into one nibble, discarding DZ flag
|
||||
flagVals = softfloat_exceptionFlags & 0x7 | ((softfloat_exceptionFlags >> 1) & 0x8);
|
||||
|
||||
// convert to floats for printing
|
||||
xf = convFloat(x);
|
||||
yf = convFloat(y);
|
||||
zf = convFloat(z);
|
||||
rf = convFloat(result);
|
||||
if (mul)
|
||||
if (add) sprintf(calc, "%f * %f + %f = %f", xf, yf, zf, rf);
|
||||
else sprintf(calc, "%f * %f = %f", xf, yf, rf);
|
||||
else sprintf(calc, "%f + %f = %f", xf, zf, rf);
|
||||
|
||||
// omit denorms, which aren't required for this project
|
||||
smallest.v = 0x0400;
|
||||
float16_t resultmag = result;
|
||||
resultmag.v &= 0x7FFF; // take absolute value
|
||||
if (f16_lt(resultmag, smallest) && (resultmag.v != 0x0000)) fprintf (fptr, "// skip denorm: ");
|
||||
if ((softfloat_exceptionFlags >> 1) % 2) fprintf(fptr, "// skip underflow: "); // test the UF bit (bit 1)
|
||||
|
||||
// skip special cases if requested
|
||||
if (resultmag.v == 0x0000 && !zeroAllowed) fprintf(fptr, "// skip zero: ");
|
||||
if ((resultmag.v == 0x7C00 || resultmag.v == 0x7BFF) && !infAllowed) fprintf(fptr, "// Skip inf: ");
|
||||
if (resultmag.v > 0x7C00 && !nanAllowed) fprintf(fptr, "// Skip NaN: ");
|
||||
|
||||
// print the test case
|
||||
fprintf(fptr, "%04x_%04x_%04x_%02x_%04x_%01x // %s %s\n", x.v, y.v, z.v, op, result.v, flagVals, calc, flags);
|
||||
}
|
||||
|
||||
void prepTests(uint16_t *e, uint16_t *f, char *testName, char *desc, float16_t *cases,
|
||||
FILE *fptr, int *numCases) {
|
||||
int i, j;
|
||||
|
||||
// Loop over all of the exponents and fractions, generating and counting all cases
|
||||
fprintf(fptr, "%s", desc); fprintf(fptr, "\n");
|
||||
*numCases=0;
|
||||
for (i=0; e[i] != 0x8000; i++)
|
||||
for (j=0; f[j] != 0x8000; j++) {
|
||||
cases[*numCases].v = f[j] | e[i]<<10;
|
||||
*numCases = *numCases + 1;
|
||||
}
|
||||
}
|
||||
|
||||
void genMulTests(uint16_t *e, uint16_t *f, int sgn, char *testName, char *desc, int roundingMode, int zeroAllowed, int infAllowed, int nanAllowed) {
|
||||
int i, j, k, numCases;
|
||||
float16_t x, y, z;
|
||||
float16_t cases[100000];
|
||||
FILE *fptr;
|
||||
char fn[80];
|
||||
|
||||
sprintf(fn, "work/%s.tv", testName);
|
||||
if ((fptr = fopen(fn, "w")) == 0) {
|
||||
printf("Error opening to write file %s. Does directory exist?\n", fn);
|
||||
exit(1);
|
||||
}
|
||||
prepTests(e, f, testName, desc, cases, fptr, &numCases);
|
||||
z.v = 0x0000;
|
||||
for (i=0; i < numCases; i++) {
|
||||
x.v = cases[i].v;
|
||||
for (j=0; j<numCases; j++) {
|
||||
y.v = cases[j].v;
|
||||
for (k=0; k<=sgn; k++) {
|
||||
y.v ^= (k<<15);
|
||||
genCase(fptr, x, y, z, 1, 0, 0, 0, roundingMode, zeroAllowed, infAllowed, nanAllowed);
|
||||
}
|
||||
}
|
||||
}
|
||||
fclose(fptr);
|
||||
}
|
||||
|
||||
int main()
|
||||
{
|
||||
if (system("mkdir -p work") != 0) exit(1); // create work directory if it doesn't exist
|
||||
softfloatInit(); // configure softfloat modes
|
||||
|
||||
// Test cases: multiplication
|
||||
genMulTests(easyExponents, easyFracts, 0, "fmul_0", "// Multiply with exponent of 0, significand of 1.0 and 1.1, RZ", 0, 0, 0, 0);
|
||||
|
||||
/* // example of how to generate tests with a different rounding mode
|
||||
softfloat_roundingMode = softfloat_round_near_even;
|
||||
genMulTests(easyExponents, easyFracts, 0, "fmul_0_rne", "// Multiply with exponent of 0, significand of 1.0 and 1.1, RNE", 1, 0, 0, 0); */
|
||||
|
||||
// Add your cases here
|
||||
|
||||
return 0;
|
||||
}
|
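
Each generated line packs x, y, z, an op byte, the expected result, and a flag nibble as underscore-separated hex fields; per genCase() above, op holds {roundingMode[1:0], mul, add, negp, negz} and the nibble holds {NV, OF, UF, NX}. The host-side sketch below (a hypothetical helper, not part of this commit) decodes one line using that layout:

```c
// tvparse.c -- hypothetical decoder for one line of the generated .tv files
#include <stdio.h>

int main(void) {
  const char *line = "3e00_3e00_0000_08_4080_0"; // 1.5 * 1.5 = 2.25, RZ, no flags
  unsigned x, y, z, op, rexpected, flags;
  if (sscanf(line, "%x_%x_%x_%x_%x_%x", &x, &y, &z, &op, &rexpected, &flags) != 6)
    return 1;
  printf("x=%04x y=%04x z=%04x r=%04x\n", x, y, z, rexpected);
  printf("roundmode=%u mul=%u add=%u negp=%u negz=%u\n",
         (op >> 4) & 3, (op >> 3) & 1, (op >> 2) & 1, (op >> 1) & 1, op & 1);
  printf("NV=%u OF=%u UF=%u NX=%u\n",
         (flags >> 3) & 1, (flags >> 2) & 1, (flags >> 1) & 1, flags & 1);
  return 0;
}
```
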
12
examples/exercises/fma16/lint-fma
Executable file

@ -0,0 +1,12 @@
#!/bin/bash
# check for warnings in Verilog code
# The verilator lint tool is faster and better than Questa so it is best to run this first.
verilator=`which verilator`

basepath=$(dirname $0)/..
if ($verilator --lint-only --top-module fma16 fma16.sv); then
    echo "fma16 passed lint"
else
    echo "fma16 failed lint"
fi

2
examples/exercises/fma16/sim-fma
Executable file

@ -0,0 +1,2 @@
vsim -do "do fma.do"

1
examples/exercises/fma16/sim-fma-batch
Executable file

@ -0,0 +1 @@
vsim -c -do "do fma.do"
52
examples/exercises/fma16/testbench.sv
Normal file

@ -0,0 +1,52 @@
/* verilator lint_off STMTDLY */
module testbench_fma16;
  logic        clk, reset;
  logic [15:0] x, y, z, rexpected, result;
  logic [7:0]  ctrl;
  logic        mul, add, negp, negz;
  logic [1:0]  roundmode;
  logic [31:0] vectornum, errors;
  logic [75:0] testvectors[10000:0];
  logic [3:0]  flags, flagsexpected; // Invalid, Overflow, Underflow, Inexact

  // instantiate device under test
  fma16 dut(x, y, z, mul, add, negp, negz, roundmode, result, flags);

  // generate clock
  always
    begin
      clk = 1; #5; clk = 0; #5;
    end

  // at start of test, load vectors and pulse reset
  initial
    begin
      $readmemh("tests/fmul_0.tv", testvectors);
      vectornum = 0; errors = 0;
      reset = 1; #22; reset = 0;
    end

  // apply test vectors on rising edge of clk
  always @(posedge clk)
    begin
      #1; {x, y, z, ctrl, rexpected, flagsexpected} = testvectors[vectornum];
      {roundmode, mul, add, negp, negz} = ctrl[5:0];
    end

  // check results on falling edge of clk
  always @(negedge clk)
    if (~reset) begin // skip during reset
      if (result !== rexpected /* | flags !== flagsexpected */) begin // check result
        $display("Error: inputs %h * %h + %h", x, y, z);
        $display("  result = %h (%h expected) flags = %b (%b expected)",
                 result, rexpected, flags, flagsexpected);
        errors = errors + 1;
      end
      vectornum = vectornum + 1;
      if (testvectors[vectornum] === 'x) begin
        $display("%d tests completed with %d errors",
                 vectornum, errors);
        $stop;
      end
    end
endmodule
5
examples/exercises/fma16/tests/fmul_0.tv
Normal file

@ -0,0 +1,5 @@
// Multiply with exponent of 0, significand of 1.0 and 1.1, RZ
3c00_3c00_0000_08_3c00_0 // 1.000000 * 1.000000 = 1.000000 NV: 0 OF: 0 UF: 0 NX: 0
3c00_3e00_0000_08_3e00_0 // 1.000000 * 1.500000 = 1.500000 NV: 0 OF: 0 UF: 0 NX: 0
3e00_3c00_0000_08_3e00_0 // 1.500000 * 1.000000 = 1.500000 NV: 0 OF: 0 UF: 0 NX: 0
3e00_3e00_0000_08_4080_0 // 1.500000 * 1.500000 = 2.250000 NV: 0 OF: 0 UF: 0 NX: 0
|
@ -13,7 +13,7 @@ def main(args):
    probenum = 0
    countLines = 1

    with open(args[0],'r') as xdcfile, open(args[1], 'w') as outfile:
    with open(args[0]) as xdcfile, open(args[1], 'w') as outfile:
        Lines = xdcfile.readlines()
        for line in Lines:
            t = re.sub("probe[0-9]+", f"probe{probenum}",line)
|
|
150
linux/Makefile
150
linux/Makefile
|
@ -1,34 +1,43 @@
|
|||
BUILDROOT := buildroot
|
||||
IMAGES := ${BUILDROOT}/output/images
|
||||
WALLYLINUX := $(WALLY)/linux
|
||||
DIS := ${IMAGES}/disassembly
|
||||
BUILDROOT := buildroot
|
||||
IMAGE_DIR := ${BUILDROOT}/output/images
|
||||
DISASSEMBLY_DIR := ${IMAGE_DIR}/disassembly
|
||||
WALLYLINUX := $(WALLY)/linux
|
||||
BR2_EXTERNAL_TREE := $(WALLYLINUX)/br2-external-tree
|
||||
LINUX_TESTVECTORS := $(RISCV)/linux-testvectors
|
||||
|
||||
# set sudo if needed depending on $RISCV
|
||||
SUDO := $(shell mkdir -p $(RISCV)/.test > /dev/null 2>&1 || echo sudo)
|
||||
BUILDROOT_OUTPUTS := Image fw_jump.bin fw_jump.elf rootfs.cpio vmlinux busybox
|
||||
BUILDROOT_OUTPUTS := $(foreach name, $(BUILDROOT_OUTPUTS), $(IMAGE_DIR)/$(name))
|
||||
|
||||
# Device tree files
|
||||
DTS ?= $(shell find devicetree -type f -regex ".*\.dts" | sort)
|
||||
DTB := $(DTS:%.dts=%.dtb)
|
||||
DTB := $(foreach name, $(DTB), $(IMAGES)/$(shell basename $(name)))
|
||||
DTS ?= $(wildcard devicetree/*.dts)
|
||||
DTB := $(foreach name, $(DTS:%.dts=%.dtb), $(IMAGE_DIR)/$(notdir $(name)))
|
||||
|
||||
# Disassembly stuff
|
||||
# Disassembly files
|
||||
BINARIES := fw_jump.elf vmlinux busybox
|
||||
OBJDUMPS := $(foreach name, $(BINARIES), $(basename $(name) .elf))
|
||||
OBJDUMPS := $(foreach name, $(OBJDUMPS), $(DIS)/$(name).objdump)
|
||||
OBJDUMPS := $(foreach name, $(basename $(BINARIES) .elf), $(DISASSEMBLY_DIR)/$(name).objdump)
|
||||
|
||||
.PHONY: all generate disassemble install clean cleanDTB check_write_permissions check_environment
|
||||
# Testvector files
|
||||
RAW_RAM_FILE := ${LINUX_TESTVECTORS}/ramGDB.bin
|
||||
RAM_FILE := ${LINUX_TESTVECTORS}/ram.bin
|
||||
RAW_BOOTMEM_FILE := ${LINUX_TESTVECTORS}/bootmemGDB.bin
|
||||
BOOTMEM_FILE := ${LINUX_TESTVECTORS}/bootmem.bin
|
||||
|
||||
all: check_environment check_write_permissions clean download Image disassemble install dumptvs
|
||||
.PHONY: all check_environment check_write_permissions config build disassemble devicetrees install dumptvs clean cleanDTB
|
||||
|
||||
check_environment: $(RISCV)
|
||||
# Default target
|
||||
all: check_write_permissions clean config build disassemble install dumptvs
|
||||
|
||||
# Check if the environment variables are set correctly
|
||||
check_environment: $(RISCV) $(WALLY)
|
||||
ifeq ($(findstring :$(RISCV)/lib:,:$(LD_LIBRARY_PATH):),)
|
||||
@(echo "ERROR: Your environment variables are not set correctly." >&2 \
|
||||
&& echo "Make sure to source setup.sh or install buildroot using the wally-tool-chain-install.sh script." >&2 \
|
||||
&& exit 1)
|
||||
endif
|
||||
|
||||
check_write_permissions:
|
||||
# Check if the user has write permissions to the RISCV directory, potentially using sudo
|
||||
SUDO := $(shell mkdir -p $(RISCV)/.test > /dev/null 2>&1 || echo sudo)
|
||||
check_write_permissions: check_environment
|
||||
ifeq ($(SUDO), sudo)
|
||||
@echo "Cannot write to '$(RISCV)'." \
|
||||
"Using sudo (you may be prompted for your password several times throughout the install)"
|
||||
|
@ -38,75 +47,92 @@ endif
|
|||
&& exit 1)
|
||||
@$(SUDO) rm -r $(RISCV)/.test
|
||||
|
||||
Image: check_environment
|
||||
bash -c "unset LD_LIBRARY_PATH; $(MAKE) -C $(BUILDROOT)"
|
||||
$(MAKE) generate
|
||||
@echo "Buildroot Image successfully generated."
|
||||
# Build buildroot and device tree binaries
|
||||
build: $(BUILDROOT_OUTPUTS) devicetrees
|
||||
|
||||
install: check_write_permissions check_environment
|
||||
# Build buildroot itself
|
||||
# LD_LIBRARY_PATH must be unset to avoid conflicts between the host and cross compiler
|
||||
$(BUILDROOT_OUTPUTS) $(IMAGE_DIR): check_environment $(BUILDROOT)
|
||||
bash -c "unset LD_LIBRARY_PATH; $(MAKE) -C $(BUILDROOT)"
|
||||
|
||||
# Install buildroot to $RISCV
|
||||
install: check_write_permissions
|
||||
$(SUDO) rm -rf $(RISCV)/$(BUILDROOT)
|
||||
$(SUDO) mv $(BUILDROOT) $(RISCV)/$(BUILDROOT)
|
||||
@echo "Buildroot successfully installed."
|
||||
|
||||
dumptvs: check_write_permissions check_environment
|
||||
$(SUDO) mkdir -p $(RISCV)/linux-testvectors
|
||||
./genInitMem.sh
|
||||
@echo "Testvectors successfully generated."
|
||||
# Generate linux boot testvectors
|
||||
dumptvs: ${RAM_FILE} ${BOOTMEM_FILE}
|
||||
|
||||
generate: $(DTB) $(IMAGES)
|
||||
# Format QEMU memory dumps for use as testvectors
|
||||
${LINUX_TESTVECTORS}/%.bin: ${LINUX_TESTVECTORS}/%GDB.bin
|
||||
truncate -s %8 $^ # Extend file to 8 byte multiple
|
||||
objcopy --reverse-bytes=8 -F binary $^ $@ # Reverse bytes
|
||||
|
||||
$(IMAGES)/%.dtb: ./devicetree/%.dts
|
||||
# Generate memory dumps from QEMU buildroot boot
|
||||
TCP_PORT := 1235
|
||||
${LINUX_TESTVECTORS}/%GDB.bin: | $(LINUX_TESTVECTORS)
|
||||
${WALLYLINUX}/qemuBoot.sh --gdb ${TCP_PORT} &
|
||||
riscv64-unknown-elf-gdb -batch \
|
||||
-ex "target remote :${TCP_PORT}" \
|
||||
-ex "maintenance packet Qqemu.PhyMemMode:1" \
|
||||
-ex "printf \"Creating ${RAW_BOOTMEM_FILE}\n\"" \
|
||||
-ex "dump binary memory ${RAW_BOOTMEM_FILE} 0x1000 0x1fff" \
|
||||
-ex "printf \"Creating ${RAW_RAM_FILE}\n\"" \
|
||||
-ex "dump binary memory ${RAW_RAM_FILE} 0x80000000 0x8fffffff" \
|
||||
-ex "kill"
|
||||
|
||||
# Generate device tree binaries
|
||||
devicetrees: $(DTB)
|
||||
$(IMAGE_DIR)/%.dtb: ${WALLYLINUX}/devicetree/%.dts | $(IMAGE_DIR)
|
||||
dtc -I dts -O dtb $< > $@
|
||||
|
||||
$(IMAGES):
|
||||
@ echo "No output/images directory in buildroot."
|
||||
@ echo "Run make --jobs in buildroot directory before generating device tree binaries."; exit 1
|
||||
# Create disassembly files
|
||||
disassemble: check_environment $(OBJDUMPS) $(DISASSEMBLY_DIR)/rootfs
|
||||
|
||||
$(RISCV):
|
||||
@ echo "ERROR: No $(RISCV) directory. Make sure you have installed the Wally Toolchain."
|
||||
@ echo "and sourced setup.sh"
|
||||
|
||||
# Disassembly rules ---------------------------------------------------
|
||||
disassemble: check_environment
|
||||
rm -rf $(BUILDROOT)/output/images/disassembly
|
||||
find $(BUILDROOT)/output/build/linux-* -maxdepth 1 -name "vmlinux" | xargs cp -t $(BUILDROOT)/output/images/
|
||||
mkdir -p $(DIS)
|
||||
$(MAKE) $(OBJDUMPS)
|
||||
# extract rootfs
|
||||
mkdir -p $(BUILDROOT)/output/images/disassembly/rootfs
|
||||
# Extract rootfs
|
||||
$(DISASSEMBLY_DIR)/rootfs: $(IMAGE_DIR)/rootfs.cpio
|
||||
@echo "Ignore error about dev/console when extracting rootfs from rootfs.cpio"
|
||||
-cpio -i -D $(BUILDROOT)/output/images/disassembly/rootfs < $(BUILDROOT)/output/images/rootfs.cpio
|
||||
@echo "Disassembly successfully completed."
|
||||
-cpio -id -D $(DISASSEMBLY_DIR)/rootfs -F $(IMAGE_DIR)/rootfs.cpio
|
||||
|
||||
$(DIS)/%.objdump: $(IMAGES)/%.elf
|
||||
riscv64-unknown-elf-objdump -DS $< >> $@
|
||||
$(WALLY)/bin/extractFunctionRadix.sh $@
|
||||
|
||||
$(DIS)/%.objdump: $(IMAGES)/%
|
||||
# Disassemble binaries
|
||||
$(DISASSEMBLY_DIR)/%.objdump: $(IMAGE_DIR)/% | $(DISASSEMBLY_DIR)
|
||||
riscv64-unknown-elf-objdump -S $< >> $@
|
||||
$(WALLY)/bin/extractFunctionRadix.sh $@
|
||||
|
||||
$(IMAGES)/vmlinux:
|
||||
linuxDir=$$(find $(BUILDROOT)/output/build -maxdepth 2 -type d -regex ".*/linux-[0-9]+\.[0-9]+\.[0-9]+$$") ;\
|
||||
cp $$linuxDir/vmlinux $@ ;\
|
||||
# Disassemble binaries ending in .elf
|
||||
$(DISASSEMBLY_DIR)/%.objdump: $(IMAGE_DIR)/%.elf | $(DISASSEMBLY_DIR)
|
||||
riscv64-unknown-elf-objdump -SD $< >> $@
|
||||
$(WALLY)/bin/extractFunctionRadix.sh $@
|
||||
|
||||
$(IMAGES)/busybox:
|
||||
busyboxDir=$$(find $(BUILDROOT)/output/build -maxdepth 2 -type d -regex ".*/busybox-[0-9]+\.[0-9]+\.[0-9]+$$") ;\
|
||||
cp $$busyboxDir/busybox $@ ;\
|
||||
|
||||
# Generating new Buildroot directories --------------------------------
|
||||
download: $(BUILDROOT)
|
||||
# Load wally buildroot configuration
|
||||
config: $(BUILDROOT) $(BR2_EXTERNAL_TREE)/configs/wally_defconfig
|
||||
$(MAKE) -C $(BUILDROOT) wally_defconfig BR2_EXTERNAL=$(BR2_EXTERNAL_TREE)
|
||||
@echo "Buildroot successfully download."
|
||||
|
||||
# Clone buildroot and checkout the correct version
|
||||
$(BUILDROOT):
|
||||
git clone https://github.com/buildroot/buildroot.git $@
|
||||
cd $@; git checkout 2024.11.x
|
||||
|
||||
# ---------------------------------------------------------------------
|
||||
# Create directories
|
||||
$(LINUX_TESTVECTORS): check_write_permissions
|
||||
$(SUDO) mkdir -p $@
|
||||
|
||||
$(DISASSEMBLY_DIR):
|
||||
mkdir -p $@
|
||||
|
||||
# Remove device tree binaries
|
||||
cleanDTB:
|
||||
rm -f $(IMAGES)/*.dtb
|
||||
rm -f $(IMAGE_DIR)/*.dtb
|
||||
|
||||
# Remove buildroot directory
|
||||
clean:
|
||||
rm -rf $(BUILDROOT)
|
||||
|
||||
# Check if the RISCV environment variable is set
|
||||
$(RISCV):
|
||||
@ echo "ERROR: No $(RISCV) directory. Make sure you have installed the Wally Toolchain."
|
||||
@ echo "and sourced setup.sh"
|
||||
|
||||
# Check if the WALLY environment variable is set
|
||||
$(WALLY):
|
||||
@ echo "ERROR: $$WALLY is not set. Make sure you have sourced setup.sh"
|
||||
|
|
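
The dumptvs flow above first pads each raw GDB memory dump to an 8-byte multiple (truncate -s %8) and then reverses the byte order within every 64-bit word (objcopy --reverse-bytes=8). As a minimal illustration of that second step, assuming nothing beyond standard C (this file is hypothetical and not part of the commit):

```c
// swap8.c -- hypothetical illustration of the "objcopy --reverse-bytes=8" step:
// every aligned 8-byte group has its byte order reversed.
#include <stdio.h>

int main(void) {
  unsigned char word[8] = {0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08};
  unsigned char out[8];
  for (int i = 0; i < 8; i++)          // reverse bytes within the 64-bit word
    out[i] = word[7 - i];
  for (int i = 0; i < 8; i++) printf("%02x", out[i]);
  printf("\n");                        // prints 0807060504030201
  return 0;
}
```
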
|
@ -35,7 +35,7 @@ The device tree files for the various FPGAs Wally supports, as well as QEMU's de
They are built automatically using the main `make` command. To build the device tree binaries (.dtb) from the device tree sources (.dts) separately, we can build all of them at once using:

```bash
$ make generate    # optionally override BUILDROOT
$ make devicetrees # optionally override BUILDROOT
```

The .dtb files will end up in the `<BUILDROOT>/output/images` folder of your chosen buildroot directory.
|
|
5
linux/br2-external-tree/board/wally/post_image.sh
Executable file

@ -0,0 +1,5 @@
#!/bin/sh

# Copy linux and busybox binaries (with symbol info) to images directory
cp "$BUILD_DIR"/linux-*/vmlinux "$BINARIES_DIR"/vmlinux
cp "$BUILD_DIR"/busybox-*/busybox "$BINARIES_DIR"/busybox
|
@ -10,6 +10,7 @@ BR2_GNU_MIRROR="http://ftpmirror.gnu.org"
|
|||
BR2_ENABLE_DEBUG=y
|
||||
BR2_DEBUG_3=y
|
||||
# BR2_STRIP_strip is not set
|
||||
BR2_PER_PACKAGE_DIRECTORIES=y
|
||||
# BR2_PIC_PIE is not set
|
||||
BR2_SSP_NONE=y
|
||||
BR2_RELRO_NONE=y
|
||||
|
@ -19,6 +20,7 @@ BR2_TARGET_GENERIC_ISSUE="Greetings! This RISC-V Linux image was built for Wally
|
|||
BR2_ROOTFS_DEVICE_TABLE_SUPPORTS_EXTENDED_ATTRIBUTES=y
|
||||
BR2_SYSTEM_DHCP="eth0"
|
||||
BR2_ROOTFS_OVERLAY="$(BR2_EXTERNAL_WALLY_PATH)/board/wally/rootfs_overlay"
|
||||
BR2_ROOTFS_POST_IMAGE_SCRIPT="$(BR2_EXTERNAL_WALLY_PATH)/board/wally/post_image.sh"
|
||||
BR2_LINUX_KERNEL=y
|
||||
BR2_LINUX_KERNEL_CUSTOM_VERSION=y
|
||||
BR2_LINUX_KERNEL_CUSTOM_VERSION_VALUE="6.12.8"
|
||||
|
|
|
@ -31,7 +31,7 @@
|
|||
status = "okay";
|
||||
compatible = "riscv";
|
||||
riscv,isa-base = "rv64i";
|
||||
riscv,isa-extensions = "i", "m", "a", "f", "d", "c", "sstc", "svadu", "svinval", "svnapot", "svpbmt", "zba", "zbb", "zbc", "zbs", "zca", "zcb", "zcd", "zfa", "zfh", "zkn", "zkt", "zicbom", "zicboz", "zicntr", "zicond", "zicsr", "zifencei", "zihpm";
|
||||
riscv,isa-extensions = "i", "m", "a", "f", "d", "c", "sstc", "svade", "svadu", "svinval", "svnapot", "svpbmt", "zba", "zbb", "zbc", "zbs", "zca", "zcb", "zcd", "zfa", "zfh", "zkn", "zkt", "zicbom", "zicboz", "zicntr", "zicond", "zicsr", "zifencei", "zihpm";
|
||||
riscv,cboz-block-size = <64>;
|
||||
riscv,cbom-block-size = <64>;
|
||||
mmu-type = "riscv,sv48";
|
||||
|
|
|
@ -31,7 +31,7 @@
|
|||
status = "okay";
|
||||
compatible = "riscv";
|
||||
riscv,isa-base = "rv64i";
|
||||
riscv,isa-extensions = "i", "m", "a", "f", "d", "c", "sstc", "svadu", "svinval", "svnapot", "svpbmt", "zba", "zbb", "zbc", "zbs", "zca", "zcb", "zcd", "zfa", "zfh", "zkn", "zkt", "zicbom", "zicboz", "zicntr", "zicond", "zicsr", "zifencei", "zihpm";
|
||||
riscv,isa-extensions = "i", "m", "a", "f", "d", "c", "sstc", "svade", "svadu", "svinval", "svnapot", "svpbmt", "zba", "zbb", "zbc", "zbs", "zca", "zcb", "zcd", "zfa", "zfh", "zkn", "zkt", "zicbom", "zicboz", "zicntr", "zicond", "zicsr", "zifencei", "zihpm";
|
||||
riscv,cboz-block-size = <64>;
|
||||
riscv,cbom-block-size = <64>;
|
||||
mmu-type = "riscv,sv48";
|
||||
|
|
|
@ -31,7 +31,7 @@
|
|||
status = "okay";
|
||||
compatible = "riscv";
|
||||
riscv,isa-base = "rv64i";
|
||||
riscv,isa-extensions = "i", "m", "a", "f", "d", "c", "sstc", "svadu", "svinval", "svnapot", "svpbmt", "zba", "zbb", "zbc", "zbs", "zca", "zcb", "zcd", "zfa", "zfh", "zkn", "zkt", "zicbom", "zicboz", "zicntr", "zicond", "zicsr", "zifencei", "zihpm";
|
||||
riscv,isa-extensions = "i", "m", "a", "f", "d", "c", "sstc", "svade", "svadu", "svinval", "svnapot", "svpbmt", "zba", "zbb", "zbc", "zbs", "zca", "zcb", "zcd", "zfa", "zfh", "zkn", "zkt", "zicbom", "zicboz", "zicntr", "zicond", "zicsr", "zifencei", "zihpm";
|
||||
riscv,cboz-block-size = <64>;
|
||||
riscv,cbom-block-size = <64>;
|
||||
mmu-type = "riscv,sv48";
|
||||
|
|
|
@ -31,7 +31,7 @@
|
|||
status = "okay";
|
||||
compatible = "riscv";
|
||||
riscv,isa-base = "rv64i";
|
||||
riscv,isa-extensions = "i", "m", "a", "f", "d", "c", "sstc", "svadu", "svinval", "svnapot", "svpbmt", "zba", "zbb", "zbc", "zbs", "zca", "zcb", "zcd", "zfa", "zfh", "zkn", "zkt", "zicbom", "zicboz", "zicntr", "zicond", "zicsr", "zifencei", "zihpm";
|
||||
riscv,isa-extensions = "i", "m", "a", "f", "d", "c", "sstc", "svade", "svadu", "svinval", "svnapot", "svpbmt", "zba", "zbb", "zbc", "zbs", "zca", "zcb", "zcd", "zfa", "zfh", "zkn", "zkt", "zicbom", "zicboz", "zicntr", "zicond", "zicsr", "zifencei", "zihpm";
|
||||
riscv,cboz-block-size = <64>;
|
||||
riscv,cbom-block-size = <64>;
|
||||
mmu-type = "riscv,sv48";
|
||||
|
|
|
@ -1,61 +0,0 @@
|
|||
#!/bin/bash
|
||||
set -e
|
||||
tcpPort=1235
|
||||
imageDir=$RISCV/buildroot/output/images
|
||||
tvDir=$RISCV/linux-testvectors
|
||||
rawRamFile="$tvDir/ramGDB.bin"
|
||||
ramFile="$tvDir/ram.bin"
|
||||
rawBootmemFile="$tvDir/bootmemGDB.bin"
|
||||
bootmemFile="$tvDir/bootmem.bin"
|
||||
rawUntrimmedBootmemFile="$tvDir/untrimmedBootmemFileGDB.bin"
|
||||
DEVICE_TREE=${imageDir}/wally-virt.dtb
|
||||
|
||||
if ! mkdir -p "$tvDir"; then
|
||||
echo "Error: unable to create linux testvector directory $tvDir!">&2
|
||||
echo "Please try running as sudo.">&2
|
||||
exit 1
|
||||
fi
|
||||
if ! test -w "$tvDir"; then
|
||||
echo "Using sudo to gain access to $tvDir"
|
||||
if ! sudo chmod -R a+rw "$tvDir"; then
|
||||
echo "Error: insuffcient write privileges for linux testvector directory $tvDir !">&2
|
||||
echo "Please chmod it. For example:">&2
|
||||
echo " sudo chmod -R a+rw $tvDir">&2
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
echo "Launching QEMU in replay mode!"
|
||||
(qemu-system-riscv64 \
|
||||
-M virt -m 256M -dtb "$DEVICE_TREE" \
|
||||
-nographic \
|
||||
-bios "$imageDir"/fw_jump.bin -kernel "$imageDir"/Image -append "root=/dev/vda ro" -initrd "$imageDir"/rootfs.cpio \
|
||||
-gdb tcp::$tcpPort -S) \
|
||||
& riscv64-unknown-elf-gdb --quiet \
|
||||
-ex "set pagination off" \
|
||||
-ex "set logging overwrite on" \
|
||||
-ex "set logging redirect on" \
|
||||
-ex "set confirm off" \
|
||||
-ex "target extended-remote :$tcpPort" \
|
||||
-ex "maintenance packet Qqemu.PhyMemMode:1" \
|
||||
-ex "printf \"Creating $rawBootmemFile\n\"" \
|
||||
-ex "dump binary memory $rawBootmemFile 0x1000 0x1fff" \
|
||||
-ex "printf \"Creating $rawRamFile\n\"" \
|
||||
-ex "dump binary memory $rawRamFile 0x80000000 0x8fffffff" \
|
||||
-ex "kill" \
|
||||
-ex "q"
|
||||
|
||||
echo "Changing Endianness"
|
||||
# Extend files to 8 byte multiple
|
||||
truncate -s %8 "$rawRamFile"
|
||||
truncate -s %8 "$rawBootmemFile"
|
||||
# Reverse bytes
|
||||
objcopy --reverse-bytes=8 -F binary "$rawRamFile" "$ramFile"
|
||||
objcopy --reverse-bytes=8 -F binary "$rawBootmemFile" "$bootmemFile"
|
||||
rm -f "$rawRamFile" "$rawBootmemFile" "$rawUntrimmedBootmemFile"
|
||||
|
||||
echo "genInitMem.sh completed!"
|
||||
echo "You may want to restrict write access to $tvDir now and give cad ownership of it."
|
||||
echo "Run the following:"
|
||||
echo " sudo chown -R cad:cad $tvDir"
|
||||
echo " sudo chmod -R go-w $tvDir"
|
42
linux/qemuBoot.sh
Executable file
42
linux/qemuBoot.sh
Executable file
|
@ -0,0 +1,42 @@
|
|||
#!/bin/bash
|
||||
###########################################
|
||||
## Boot linux on QEMU configured to match Wally
|
||||
##
|
||||
## Written: Jordan Carlin, jcarlin@hmc.edu
|
||||
## Created: 20 January 2025
|
||||
## Modified:
|
||||
##
|
||||
## A component of the CORE-V-WALLY configurable RISC-V project.
|
||||
## https://github.com/openhwgroup/cvw
|
||||
##
|
||||
## Copyright (C) 2021-25 Harvey Mudd College & Oklahoma State University
|
||||
##
|
||||
## SPDX-License-Identifier: Apache-2.0 WITH SHL-2.1
|
||||
##
|
||||
## Licensed under the Solderpad Hardware License v 2.1 (the “License”); you may not use this file
|
||||
## except in compliance with the License, or, at your option, the Apache License version 2.0. You
|
||||
## may obtain a copy of the License at
|
||||
##
|
||||
## https:##solderpad.org/licenses/SHL-2.1/
|
||||
##
|
||||
## Unless required by applicable law or agreed to in writing, any work distributed under the
|
||||
## License is distributed on an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
|
||||
## either express or implied. See the License for the specific language governing permissions
|
||||
## and limitations under the License.
|
||||
################################################################################################
|
||||
|
||||
BUILDROOT="${BUILDROOT:-$RISCV/buildroot}"
|
||||
IMAGES="$BUILDROOT"/output/images
|
||||
|
||||
if [[ "$1" == "--gdb" && -n "$2" ]]; then
|
||||
GDB_FLAG="-gdb tcp::$2 -S"
|
||||
fi
|
||||
|
||||
qemu-system-riscv64 \
|
||||
-M virt -m 256M -nographic \
|
||||
-bios "$IMAGES"/fw_jump.bin \
|
||||
-kernel "$IMAGES"/Image \
|
||||
-initrd "$IMAGES"/rootfs.cpio \
|
||||
-dtb "$IMAGES"/wally-virt.dtb \
|
||||
-cpu rva22s64,zicond=true,zfa=true,zfh=true,zcb=true,zbc=true,zkn=true,sstc=true,svadu=true,svnapot=true \
|
||||
$GDB_FLAG
|
|
@ -53,7 +53,7 @@ add wave -noupdate -group {Execution Stage} /testbench/dut/core/ifu/PCE
|
|||
add wave -noupdate -group {Execution Stage} /testbench/dut/core/ifu/InstrE
|
||||
add wave -noupdate -group {Execution Stage} /testbench/InstrEName
|
||||
add wave -noupdate -group {Execution Stage} /testbench/dut/core/ieu/c/InstrValidE
|
||||
add wave -noupdate -group {Execution Stage} /testbench/FunctionName/FunctionName/FunctionName
|
||||
add wave -noupdate -group {Execution Stage} /testbench/functionName/functionName/FunctionName
|
||||
add wave -noupdate -expand -group {Memory Stage} /testbench/dut/core/PCM
|
||||
add wave -noupdate -expand -group {Memory Stage} /testbench/dut/core/InstrM
|
||||
add wave -noupdate -expand -group {Memory Stage} /testbench/InstrMName
|
||||
|
|
|
@ -1,20 +1,24 @@
|
|||
onerror {resume}
|
||||
quietly virtual signal -install /testbench/dut/core/ifu/bpred/bpred { /testbench/dut/core/ifu/bpred/bpred/PostSpillInstrRawF[11:7]} rd
|
||||
quietly WaveActivateNextPane {} 0
|
||||
add wave -noupdate /testbench/clk
|
||||
add wave -noupdate /testbench/reset
|
||||
add wave -noupdate /testbench/totalerrors
|
||||
add wave -noupdate /testbench/memfilename
|
||||
add wave -noupdate /testbench/dut/core/SATP_REGW
|
||||
add wave -noupdate /testbench/dut/core/InstrValidM
|
||||
add wave -noupdate -expand -group HDU -group hazards /testbench/dut/core/hzu/RetM
|
||||
add wave -noupdate -expand -group HDU -group hazards -color Pink /testbench/dut/core/hzu/TrapM
|
||||
add wave -noupdate -expand -group HDU -group hazards /testbench/dut/core/ieu/c/LoadStallD
|
||||
add wave -noupdate -expand -group HDU -group hazards /testbench/dut/core/ifu/IFUStallF
|
||||
add wave -noupdate -expand -group HDU -group hazards /testbench/dut/core/hzu/BPWrongE
|
||||
add wave -noupdate -expand -group HDU -group hazards /testbench/dut/core/hzu/LSUStallM
|
||||
add wave -noupdate -expand -group HDU -group hazards /testbench/dut/core/ieu/c/MDUStallD
|
||||
add wave -noupdate -expand -group HDU -group hazards /testbench/dut/core/hzu/DivBusyE
|
||||
add wave -noupdate -expand -group HDU -group hazards /testbench/dut/core/hzu/FDivBusyE
|
||||
add wave -noupdate -expand -group HDU -expand -group hazards /testbench/dut/core/hzu/RetM
|
||||
add wave -noupdate -expand -group HDU -expand -group hazards -color Pink /testbench/dut/core/hzu/TrapM
|
||||
add wave -noupdate -expand -group HDU -expand -group hazards /testbench/dut/core/ieu/c/LoadStallD
|
||||
add wave -noupdate -expand -group HDU -expand -group hazards /testbench/dut/core/ifu/IFUStallF
|
||||
add wave -noupdate -expand -group HDU -expand -group hazards /testbench/dut/core/ifu/IFUCacheBusStallF
|
||||
add wave -noupdate -expand -group HDU -expand -group hazards /testbench/dut/core/ifu/ICacheStallF
|
||||
add wave -noupdate -expand -group HDU -expand -group hazards /testbench/dut/core/ifu/BusStall
|
||||
add wave -noupdate -expand -group HDU -expand -group hazards /testbench/dut/core/hzu/BPWrongE
|
||||
add wave -noupdate -expand -group HDU -expand -group hazards /testbench/dut/core/hzu/LSUStallM
|
||||
add wave -noupdate -expand -group HDU -expand -group hazards /testbench/dut/core/ieu/c/MDUStallD
|
||||
add wave -noupdate -expand -group HDU -expand -group hazards /testbench/dut/core/hzu/DivBusyE
|
||||
add wave -noupdate -expand -group HDU -expand -group hazards /testbench/dut/core/hzu/FDivBusyE
|
||||
add wave -noupdate -expand -group HDU -expand -group hazards /testbench/dut/core/hzu/FetchBufferStallF
|
||||
add wave -noupdate -expand -group HDU -group traps /testbench/dut/core/priv/priv/trap/InstrMisalignedFaultM
|
||||
add wave -noupdate -expand -group HDU -group traps /testbench/dut/core/priv/priv/trap/InstrAccessFaultM
|
||||
add wave -noupdate -expand -group HDU -group traps /testbench/dut/core/priv/priv/trap/IllegalInstrFaultM
|
||||
|
@ -34,25 +38,67 @@ add wave -noupdate -expand -group HDU -expand -group Flush -color Yellow /testbe
|
|||
add wave -noupdate -expand -group HDU -expand -group Flush -color Yellow /testbench/dut/core/FlushE
|
||||
add wave -noupdate -expand -group HDU -expand -group Flush -color Yellow /testbench/dut/core/FlushM
|
||||
add wave -noupdate -expand -group HDU -expand -group Flush -color Yellow /testbench/dut/core/FlushW
|
||||
add wave -noupdate -expand -group HDU -group Stall -color Orange /testbench/dut/core/StallF
|
||||
add wave -noupdate -expand -group HDU -group Stall -color Orange /testbench/dut/core/StallD
|
||||
add wave -noupdate -expand -group HDU -group Stall -color Orange /testbench/dut/core/StallE
|
||||
add wave -noupdate -expand -group HDU -group Stall -color Orange /testbench/dut/core/StallM
|
||||
add wave -noupdate -expand -group HDU -group Stall -color Orange /testbench/dut/core/StallW
|
||||
add wave -noupdate -expand -group HDU -expand -group Stall -color Orange /testbench/dut/core/StallF
|
||||
add wave -noupdate -expand -group HDU -expand -group Stall -color Orange /testbench/dut/core/StallD
|
||||
add wave -noupdate -expand -group HDU -expand -group Stall -color Orange /testbench/dut/core/StallE
|
||||
add wave -noupdate -expand -group HDU -expand -group Stall -color Orange /testbench/dut/core/StallM
|
||||
add wave -noupdate -expand -group HDU -expand -group Stall -color Orange /testbench/dut/core/StallW
|
||||
add wave -noupdate -expand -group HDU -expand -group Stall /testbench/dut/core/ifu/StallFBF
|
||||
add wave -noupdate -expand -group HDU -expand -group Stall /testbench/dut/core/ifu/NoStallPCF
|
||||
add wave -noupdate -expand -group HDU -group interrupts /testbench/dut/core/priv/priv/trap/PendingIntsM
|
||||
add wave -noupdate -expand -group HDU -group interrupts /testbench/dut/core/priv/priv/trap/InstrValidM
|
||||
add wave -noupdate -expand -group HDU -group interrupts /testbench/dut/core/priv/priv/trap/ValidIntsM
|
||||
add wave -noupdate -expand -group HDU -group interrupts /testbench/dut/core/hzu/WFIInterruptedM
|
||||
add wave -noupdate /testbench/dut/core/StallW
|
||||
add wave -noupdate /testbench/dut/core/hzu/StallWCause
|
||||
add wave -noupdate /testbench/dut/core/hzu/IFUStallF
|
||||
add wave -noupdate -label {Contributors: IFUStallF} -group {Contributors: sim:/testbench/dut/core/hzu/IFUStallF} /testbench/dut/core/ifu/IFUCacheBusStallF
|
||||
add wave -noupdate -label {Contributors: IFUStallF} -group {Contributors: sim:/testbench/dut/core/hzu/IFUStallF} /testbench/dut/core/ifu/SelSpillNextF
|
||||
add wave -noupdate /testbench/dut/core/ifu/IFUCacheBusStallF
|
||||
add wave -noupdate -label {Contributors: IFUCacheBusStallF} -group {Contributors: sim:/testbench/dut/core/ifu/IFUCacheBusStallF} /testbench/dut/core/ifu/BusStall
|
||||
add wave -noupdate -label {Contributors: IFUCacheBusStallF} -group {Contributors: sim:/testbench/dut/core/ifu/IFUCacheBusStallF} /testbench/dut/core/ifu/ICacheStallF
|
||||
add wave -noupdate -group {instruction pipeline} /testbench/InstrFName
|
||||
add wave -noupdate -group {instruction pipeline} {/testbench/dut/core/ifu/bus/icache/icache/CacheWays[0]/CacheTagMem/ce}
|
||||
add wave -noupdate -group {instruction pipeline} {/testbench/dut/core/ifu/bus/icache/icache/CacheWays[0]/CacheTagMem/addr}
|
||||
add wave -noupdate -group {instruction pipeline} {/testbench/dut/core/ifu/bus/icache/icache/CacheWays[0]/CacheTagMem/ram/addrd}
|
||||
add wave -noupdate -group {instruction pipeline} -radix decimal {/testbench/dut/core/ifu/bus/icache/icache/CacheWays[0]/CacheSetTag}
|
||||
add wave -noupdate -group {instruction pipeline} {/testbench/dut/core/ifu/bus/icache/icache/CacheWays[0]/ValidBits}
|
||||
add wave -noupdate -group {instruction pipeline} {/testbench/dut/core/ifu/bus/icache/icache/CacheWays[0]/PAdr}
|
||||
add wave -noupdate -group {instruction pipeline} {/testbench/dut/core/ifu/bus/icache/icache/CacheWays[0]/ReadTag}
|
||||
add wave -noupdate -group {instruction pipeline} {/testbench/dut/core/ifu/bus/icache/icache/CacheWays[0]/InvalidateCacheDelay}
|
||||
add wave -noupdate -group {instruction pipeline} {/testbench/dut/core/ifu/bus/icache/icache/CacheWays[0]/ValidWay}
|
||||
add wave -noupdate -group {instruction pipeline} {/testbench/dut/core/ifu/bus/icache/icache/CacheWays[0]/HitWay}
|
||||
add wave -noupdate -group {instruction pipeline} {/testbench/dut/core/ifu/bus/icache/icache/CacheWays[0]/FlushCache}
|
||||
add wave -noupdate -group {instruction pipeline} {/testbench/dut/core/ifu/bus/icache/icache/CacheWays[0]/SelVictim}
|
||||
add wave -noupdate -group {instruction pipeline} {/testbench/dut/core/ifu/bus/icache/icache/CacheWays[0]/SelectedWay}
|
||||
add wave -noupdate -group {instruction pipeline} -expand /testbench/dut/core/ifu/bus/icache/icache/ReadDataLineWay
|
||||
add wave -noupdate -group {instruction pipeline} /testbench/dut/core/ifu/bus/icache/icache/ReadDataLineCache
|
||||
add wave -noupdate -group {instruction pipeline} /testbench/dut/core/ifu/bus/icache/icache/SelFetchBuffer
|
||||
add wave -noupdate -group {instruction pipeline} /testbench/dut/core/ifu/bus/icache/icache/WordOffsetAddr
|
||||
add wave -noupdate -group {instruction pipeline} /testbench/dut/core/ifu/bus/icache/icache/ReadDataLine
|
||||
add wave -noupdate -group {instruction pipeline} /testbench/dut/core/ifu/ICacheInstrF
|
||||
add wave -noupdate -group {instruction pipeline} /testbench/dut/core/ifu/SelIROM
|
||||
add wave -noupdate -group {instruction pipeline} /testbench/dut/core/ifu/CacheableF
|
||||
add wave -noupdate -group {instruction pipeline} /testbench/dut/core/ifu/Spill/spill/InstrRawF
|
||||
add wave -noupdate -group {instruction pipeline} /testbench/dut/core/ifu/Spill/spill/SelSpillF
|
||||
add wave -noupdate -group {instruction pipeline} /testbench/dut/core/ifu/PostSpillInstrRawF
|
||||
add wave -noupdate -group {instruction pipeline} /testbench/dut/core/ifu/InstrD
|
||||
add wave -noupdate -group {instruction pipeline} /testbench/dut/core/ifu/InstrE
|
||||
add wave -noupdate -group {instruction pipeline} /testbench/dut/core/ifu/InstrM
|
||||
add wave -noupdate -group PCS /testbench/dut/core/ifu/PCNextF
|
||||
add wave -noupdate -group PCS /testbench/dut/core/ifu/PCF
|
||||
add wave -noupdate -group PCS /testbench/dut/core/ifu/PCD
|
||||
add wave -noupdate -group PCS /testbench/dut/core/PCE
|
||||
add wave -noupdate -group PCS /testbench/dut/core/PCM
|
||||
add wave -noupdate -expand -group PCS /testbench/dut/core/ifu/CompressedF
|
||||
add wave -noupdate -expand -group PCS /testbench/dut/core/ifu/PCPlus2or4F
|
||||
add wave -noupdate -expand -group PCS /testbench/dut/core/ifu/BPWrongE
|
||||
add wave -noupdate -expand -group PCS /testbench/dut/core/ifu/PC1NextF
|
||||
add wave -noupdate -expand -group PCS /testbench/dut/core/ifu/CSRWriteFenceM
|
||||
add wave -noupdate -expand -group PCS /testbench/dut/core/ifu/PC2NextF
|
||||
add wave -noupdate -expand -group PCS /testbench/dut/core/ifu/RetM
|
||||
add wave -noupdate -expand -group PCS /testbench/dut/core/ifu/TrapM
|
||||
add wave -noupdate -expand -group PCS /testbench/dut/core/ifu/UnalignedPCNextF
|
||||
add wave -noupdate -expand -group PCS /testbench/dut/core/ifu/PCNextF
|
||||
add wave -noupdate -expand -group PCS /testbench/dut/core/ifu/PCF
|
||||
add wave -noupdate -expand -group PCS /testbench/dut/core/ifu/PCD
|
||||
add wave -noupdate -expand -group PCS /testbench/dut/core/PCE
|
||||
add wave -noupdate -expand -group PCS /testbench/dut/core/PCM
|
||||
add wave -noupdate -group {PCNext Generation} /testbench/dut/core/ifu/PCPlus2or4F
|
||||
add wave -noupdate -group {PCNext Generation} /testbench/dut/core/ifu/IEUAdrE
|
||||
add wave -noupdate -group {PCNext Generation} /testbench/dut/core/ifu/PCSrcE
|
||||
|
@ -69,7 +115,6 @@ add wave -noupdate -group {PCNext Generation} -expand -group pcmux3 /testbench/d
|
|||
add wave -noupdate -group {PCNext Generation} -expand -group pcmux3 /testbench/dut/core/ifu/UnalignedPCNextF
|
||||
add wave -noupdate -group {PCNext Generation} /testbench/dut/core/ifu/PCNextF
|
||||
add wave -noupdate -group {PCNext Generation} /testbench/dut/core/ifu/PCF
|
||||
add wave -noupdate -group {PCNext Generation} /testbench/dut/core/ifu/bpred/bpred/NextValidPCE
|
||||
add wave -noupdate -group {PCNext Generation} /testbench/dut/core/ifu/PCSpillNextF
|
||||
add wave -noupdate -group {PCNext Generation} /testbench/dut/core/ifu/PCSpillF
|
||||
add wave -noupdate -expand -group ifu -group Bpred -group {branch update selection inputs} /testbench/dut/core/ifu/bpred/bpred/Predictor/DirPredictor/GHRM
|
||||
|
@ -230,8 +275,7 @@ add wave -noupdate -group lsu -group alignment /testbench/dut/core/lsu/ByteMaskM
|
|||
add wave -noupdate -group lsu -group alignment /testbench/dut/core/lsu/ByteMaskExtendedM
|
||||
add wave -noupdate -group lsu -group alignment /testbench/dut/core/lsu/ByteMaskSpillM
|
||||
add wave -noupdate -group lsu -group alignment /testbench/dut/core/lsu/LSUWriteDataM
|
||||
add wave -noupdate -group lsu -group alignment /testbench/dut/core/lsu/LSUWriteDataSpillM
|
||||
add wave -noupdate -group lsu -group alignment /testbench/dut/core/lsu/bus/dcache/dcache/WriteData
|
||||
add wave -noupdate -group lsu -group alignment /testbench/dut/core/lsu/bus/dcache/dcache/ByteMask
|
||||
add wave -noupdate -group lsu -group alignment /testbench/dut/core/lsu/bus/dcache/dcache/WriteSelLogic/BlankByteMask
|
||||
add wave -noupdate -group lsu -group alignment /testbench/dut/core/lsu/bus/dcache/dcache/WriteSelLogic/DemuxedByteMask
|
||||
|
@ -579,9 +623,6 @@ add wave -noupdate -group CSRs /testbench/dut/core/priv/priv/csr/SSTATUS_REGW
|
|||
add wave -noupdate -group CSRs /testbench/dut/core/priv/priv/csr/STVEC_REGW
|
||||
add wave -noupdate -group CSRs /testbench/dut/core/priv/priv/csr/SENVCFG_REGW
|
||||
add wave -noupdate -group CSRs /testbench/dut/core/priv/priv/csr/csrs/csrs/STIMECMP_REGW
|
||||
add wave -noupdate -group CSRs -group {user mode} /testbench/dut/core/priv/priv/csr/csru/csru/FRM_REGW
|
||||
add wave -noupdate -group CSRs -group {user mode} /testbench/dut/core/priv/priv/csr/csru/csru/FFLAGS_REGW
|
||||
add wave -noupdate -group CSRs -group {user mode} /testbench/dut/core/priv/priv/csr/csru/csru/STATUS_FS
|
||||
add wave -noupdate -group CSRs -expand -group {Performance Counters} -label MCYCLE -radix unsigned {/testbench/dut/core/priv/priv/csr/counters/counters/HPMCOUNTER_REGW[0]}
|
||||
add wave -noupdate -group CSRs -expand -group {Performance Counters} -label MINSTRET -radix unsigned {/testbench/dut/core/priv/priv/csr/counters/counters/HPMCOUNTER_REGW[2]}
|
||||
add wave -noupdate -group CSRs -expand -group {Performance Counters} -expand -group BP -label Branch -radix unsigned {/testbench/dut/core/priv/priv/csr/counters/counters/HPMCOUNTER_REGW[3]}
|
||||
|
@ -633,24 +674,11 @@ add wave -noupdate -group Forward /testbench/dut/core/ieu/c/RdW
|
|||
add wave -noupdate -group {alu execution stage} /testbench/dut/core/ieu/dp/ALUResultE
|
||||
add wave -noupdate -group {alu execution stage} /testbench/dut/core/ieu/dp/SrcAE
|
||||
add wave -noupdate -group {alu execution stage} /testbench/dut/core/ieu/dp/SrcBE
|
||||
add wave -noupdate -group FPU /testbench/dut/core/fpu/fpu/FRD1E
|
||||
add wave -noupdate -group FPU /testbench/dut/core/fpu/fpu/FRD2E
|
||||
add wave -noupdate -group FPU /testbench/dut/core/fpu/fpu/FRD3E
|
||||
add wave -noupdate -group FPU /testbench/dut/core/fpu/fpu/ForwardedSrcAE
|
||||
add wave -noupdate -group FPU /testbench/dut/core/fpu/fpu/ForwardedSrcBE
|
||||
add wave -noupdate -group FPU /testbench/dut/core/fpu/fpu/Funct3E
|
||||
add wave -noupdate -group FPU /testbench/dut/core/fpu/fpu/W64E
|
||||
add wave -noupdate -group FPU /testbench/dut/core/fpu/fpu/unpack/X
|
||||
add wave -noupdate -group FPU /testbench/dut/core/fpu/fpu/unpack/Y
|
||||
add wave -noupdate -group FPU /testbench/dut/core/fpu/fpu/unpack/Z
|
||||
add wave -noupdate -group FPU /testbench/dut/core/fpu/fpu/fregfile/rf
|
||||
add wave -noupdate -group wfi /testbench/dut/core/priv/priv/pmd/STATUS_TW
|
||||
add wave -noupdate -group wfi /testbench/dut/core/priv/priv/pmd/PrivilegeModeW
|
||||
add wave -noupdate -group wfi /testbench/dut/core/priv/priv/pmd/wfi/WFICount
|
||||
add wave -noupdate -group wfi /testbench/dut/core/priv/priv/pmd/WFITimeoutM
|
||||
add wave -noupdate -group testbench /testbench/DCacheFlushStart
|
||||
add wave -noupdate /testbench/dut/core/lsu/hptw/hptw/HPTWLoadPageFault
|
||||
add wave -noupdate /testbench/dut/core/lsu/hptw/hptw/HPTWLoadPageFaultDelay
|
||||
add wave -noupdate -group spi /testbench/dut/uncoregen/uncore/spi/spi/PCLK
|
||||
add wave -noupdate -group spi -expand -group interface /testbench/dut/uncoregen/uncore/spi/spi/SPICLK
|
||||
add wave -noupdate -group spi -expand -group interface /testbench/dut/uncoregen/uncore/spi/spi/SPICS
|
||||
|
@ -702,4 +730,4 @@ configure wave -griddelta 40
|
|||
configure wave -timeline 0
|
||||
configure wave -timelineunits ns
|
||||
update
|
||||
WaveRestoreZoom {371 ns} {471 ns}
|
||||
WaveRestoreZoom {371 ns} {471 ns}
|
|
@ -67,8 +67,8 @@ def main():
|
|||
parser.add_argument('-d', "--dist", action='store_true', help="Report distribution of operations")
|
||||
parser.add_argument('-s', "--sim", help="Simulator", choices=["questa", "verilator", "vcs"], default="verilator")
|
||||
args = parser.parse_args()
|
||||
simargs = "I_CACHE_ADDR_LOGGER=1\\\'b1 D_CACHE_ADDR_LOGGER=1\\\'b1"
|
||||
testcmd = "wsim --sim " + args.sim + " rv64gc {} --params \"" + simargs + "\" > /dev/null"
|
||||
simargs = "I_CACHE_ADDR_LOGGER=1\\'b1 D_CACHE_ADDR_LOGGER=1\\'b1"
|
||||
testcmd = "wsim --sim " + args.sim + ' rv64gc {} --params "' + simargs + '" > /dev/null'
|
||||
#cachecmd = "CacheSim.py 64 4 56 44 -f {} --verbose"
|
||||
cachecmd = "CacheSim.py 64 4 56 44 -f {}"
|
||||
mismatches = 0
|
||||
|
|
|
@ -1,32 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
import os,sys,subprocess
|
||||
from datetime import datetime, timezone, timedelta
|
||||
|
||||
if not os.path.isfile(sys.path[0]+'/slack-webhook-url.txt'):
|
||||
print('==============================================================')
|
||||
print(' HOWDY! ')
|
||||
print('slack-notifier.py can help let you know when your sim is done.')
|
||||
print('To make it work, please supply your Slack bot webhook URL in:')
|
||||
print(sys.path[0]+'/slack-webhook-url.txt')
|
||||
print('Tutorial for slack webhook urls: https://bit.ly/BenSlackNotifier')
|
||||
print('==============================================================')
|
||||
else:
|
||||
urlFile = open(sys.path[0]+'/slack-webhook-url.txt','r')
|
||||
url = urlFile.readline().strip('\n')
|
||||
|
||||
# Traverse 3 parents up the process tree
|
||||
result = subprocess.check_output('ps -o ppid -p $PPID',shell=True)
|
||||
PPID2 = str(result).split('\\n')[1]
|
||||
result = subprocess.check_output('ps -o ppid -p '+PPID2,shell=True)
|
||||
PPID3 = str(result).split('\\n')[1]
|
||||
# Get command name
|
||||
result = subprocess.check_output('ps -o cmd -p '+PPID3,shell=True)
|
||||
cmdName = str(result).split('\\n')[1]
|
||||
# Get current time
|
||||
timezone_offset = -8.0 # Pacific Standard Time (UTC−08:00)
|
||||
tzinfo = timezone(timedelta(hours=timezone_offset))
|
||||
time = datetime.now(tzinfo).strftime('%I:%M %p')
|
||||
# Send message
|
||||
message = 'Command `'+cmdName+'` completed at '+time+' PST'
|
||||
result = subprocess.run('curl -X POST -H \'Content-type: application/json\' --data \'{"text":"'+message+'"}\' '+url,shell=True,stdout=subprocess.DEVNULL,stderr=subprocess.DEVNULL)
|
||||
print('Simulation stopped. Sending Slack message.')
|
|
@ -12,7 +12,7 @@ import subprocess
|
|||
import sys
|
||||
|
||||
# Global variables
|
||||
WALLY = os.environ.get('WALLY')
|
||||
WALLY = os.environ.get("WALLY")
|
||||
simdir = f"{WALLY}/sim/vcs"
|
||||
cfgdir = f"{WALLY}/config"
|
||||
srcdir = f"{WALLY}/src"
|
||||
|
@ -21,10 +21,10 @@ logdir = f"{simdir}/logs"
|
|||
|
||||
# run a Linux command and return the result as a string in a form that VCS can use
|
||||
def runFindCommand(cmd):
|
||||
res = subprocess.check_output(cmd, shell=True, )
|
||||
res = subprocess.check_output(cmd, shell=True)
|
||||
res = str(res)
|
||||
res = res.replace("\\n", " ") # replace newline with space
|
||||
res = res.replace("\'", "") # strip off quotation marks
|
||||
res = res.replace("'", "") # strip off quotation marks
|
||||
res = res[1:] # strip off leading b from byte string
|
||||
return res
|
||||
|
||||
|
@ -42,20 +42,20 @@ def parseArgs():
|
|||
#parser.add_argument("--gui", "-g", help="Simulate with GUI", action="store_true") # GUI not yet implemented
|
||||
return parser.parse_args()
|
||||
|
||||
def createDirs(args):
|
||||
wkdir = f"{simdir}/wkdir/{args.config}_{args.testsuite}"
|
||||
covdir = f"{simdir}/cov/{args.config}_{args.testsuite}"
|
||||
def createDirs(config, testsuite):
|
||||
wkdir = f"{simdir}/wkdir/{config}_{testsuite}"
|
||||
covdir = f"{simdir}/cov/{config}_{testsuite}"
|
||||
os.makedirs(wkdir, exist_ok=True)
|
||||
os.makedirs(covdir, exist_ok=True)
|
||||
os.makedirs(logdir, exist_ok=True)
|
||||
return wkdir, covdir
|
||||
return wkdir
|
||||
|
||||
def generateFileList():
|
||||
def generateFileList(testbench):
|
||||
rtlsrc_cmd = f'find {srcdir} -name "*.sv" ! -path "{srcdir}/generic/mem/rom1p1r_128x64.sv" ! -path "{srcdir}/generic/mem/ram2p1r1wbe_128x64.sv" ! -path "{srcdir}/generic/mem/rom1p1r_128x32.sv" ! -path "{srcdir}/generic/mem/ram2p1r1wbe_2048x64.sv"'
|
||||
rtlsrc_files = runFindCommand(rtlsrc_cmd)
|
||||
tbcommon_cmd = f'find {tbdir}/common -name "*.sv"'
|
||||
tbcommon_files = runFindCommand(tbcommon_cmd)
|
||||
tb_file = f'{tbdir}/{args.tb}.sv'
|
||||
tb_file = f"{tbdir}/{testbench}.sv"
|
||||
return f"{tb_file} {rtlsrc_files} {tbcommon_files}"
|
||||
|
||||
def processArgs(wkdir, args):
|
||||
|
@ -76,7 +76,7 @@ def processArgs(wkdir, args):
|
|||
# if args.gui:
|
||||
# compileOptions.append("-debug_access+all+reverse -kdb +vcs+vcdpluson")
|
||||
compileOptions = " ".join(compileOptions)
|
||||
simvOptions = " ".join(simvOptions)
|
||||
simvOptions = " ".join(simvOptions)
|
||||
return compileOptions, simvOptions
|
||||
|
||||
def setupParamOverrides(wkdir, args):
|
||||
|
@ -84,35 +84,35 @@ def setupParamOverrides(wkdir, args):
|
|||
with open(paramOverrideFile, "w") as f:
|
||||
for param in args.params.split():
|
||||
[param, value] = param.split("=")
|
||||
if fr"\'" in value: # for bit values
|
||||
value = value.replace(fr"\'", "'")
|
||||
else: # for strings
|
||||
value = f'"{value}"'
|
||||
value = value.replace("\\'", "'") if "\\'" in value else f'"{value}"' # transform quotes/bit indicators
|
||||
f.write(f"assign {value} {args.tb}/{param}\n")
|
||||
return f" -parameters {wkdir}/param_overrides.txt "
|
||||
|
||||
def setupCommands(wkdir, rtlFiles, compileOptions, simvOptions, args):
|
||||
includePath=f"+incdir+{cfgdir}/{args.config} +incdir+{cfgdir}/deriv/{args.config} +incdir+{cfgdir}/shared +incdir+$WALLY/tests +incdir+{tbdir} +incdir+{srcdir}"
|
||||
includePath = f"+incdir+{cfgdir}/{args.config} +incdir+{cfgdir}/deriv/{args.config} +incdir+{cfgdir}/shared +incdir+$WALLY/tests +incdir+{tbdir} +incdir+{srcdir}"
|
||||
vcsStandardFlags = "+lint=all,noGCWM,noUI,noSVA-UA,noIDTS,noNS,noULCO,noCAWM-L,noWMIA-L,noSV-PIU,noSTASKW_CO,noSTASKW_CO1,noSTASKW_RMCOF -suppress +warn -sverilog +vc -Mupdate -line -full64 -lca -ntb_opts sensitive_dyn"
|
||||
vcsCMD = f"vcs {vcsStandardFlags} -top {args.tb} {compileOptions} -Mdir={wkdir} {includePath} {srcdir}/cvw.sv {rtlFiles} -o {wkdir}/sim_out -work {wkdir} -Mlib={wkdir} -l {logdir}/{args.config}_{args.testsuite}.log"
|
||||
simvCMD = f"{wkdir}/sim_out +TEST={args.testsuite} {args.args} -no_save {simvOptions}"
|
||||
return vcsCMD, simvCMD
|
||||
|
||||
def runVCS(wkdir, vcsCMD, simvCMD):
|
||||
def runVCS(vcsCMD, simvCMD):
|
||||
print(f"Executing: {vcsCMD}")
|
||||
subprocess.run(vcsCMD, shell=True)
|
||||
subprocess.run(simvCMD, shell=True)
|
||||
if (args.ccov):
|
||||
COV_RUN = f"urg -dir {wkdir}/coverage.vdb -format text -report IndividualCovReport/{args.config}_{args.testsuite}"
|
||||
subprocess.run(COV_RUN, shell=True)
|
||||
subprocess.run(vcsCMD, shell=True, check=True)
|
||||
subprocess.run(simvCMD, shell=True, check=True)
|
||||
|
||||
def runCoverage(wkdir, config, testsuite):
|
||||
COV_RUN = f"urg -dir {wkdir}/coverage.vdb -format text -report IndividualCovReport/{config}_{testsuite}"
|
||||
subprocess.run(COV_RUN, shell=True, check=True)
|
||||
|
||||
def main(args):
|
||||
print(f"run_vcs Config={args.config} tests={args.testsuite} lockstep={args.lockstep} args='{args.args}' params='{args.params}' define='{args.define}'")
|
||||
wkdir, covdir = createDirs(args)
|
||||
rtlFiles = generateFileList()
|
||||
wkdir = createDirs(args.config, args.testsuite)
|
||||
rtlFiles = generateFileList(args.tb)
|
||||
compileOptions, simvOptions = processArgs(wkdir, args)
|
||||
vcsCMD, simvCMD = setupCommands(wkdir, rtlFiles, compileOptions, simvOptions, args)
|
||||
runVCS(wkdir, vcsCMD, simvCMD)
|
||||
runVCS(vcsCMD, simvCMD)
|
||||
if args.ccov:
|
||||
runCoverage(wkdir, args.config, args.testsuite)
|
||||
|
||||
if __name__ == "__main__":
|
||||
args = parseArgs()
|
||||
|
|
|
@ -15,7 +15,7 @@ export IMPERASD_LICENSE_FILE=27020@zircon.eng.hmc.edu # Change thi
|
|||
export QUESTA_HOME=/cad/mentor/questa_sim-2023.4/questasim # Change this for your path to Questa, excluding bin
|
||||
export DC_HOME=/cad/synopsys/SYN # Change this for your path to Synopsys DC, excluding bin
|
||||
export VCS_HOME=/cad/synopsys/vcs/U-2023.03-SP2-4 # Change this for your path to Synopsys VCS, excluding bin
|
||||
export BREKER_HOME=/cad/breker/trek5-2.1.10b-GCC6_el7 # Change this for your path to Breker Trek
|
||||
export BREKER_HOME=/cad/breker/trek5-2.1.11-GCC6_el7 # Change this for your path to Breker Trek
|
||||
|
||||
# Tools
|
||||
# Questa and Synopsys
|
||||
|
|
|
@ -28,25 +28,40 @@
|
|||
////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
module hazard (
|
||||
input logic BPWrongE, CSRWriteFenceM, RetM, TrapM,
|
||||
input logic StructuralStallD,
|
||||
input logic LSUStallM, IFUStallF, FetchBufferStallF,
|
||||
input logic FPUStallD, ExternalStall,
|
||||
input logic DivBusyE, FDivBusyE,
|
||||
input logic wfiM, IntPendingM,
|
||||
// Stall & flush outputs
|
||||
output logic StallF, StallD, StallE, StallM, StallW,
|
||||
output logic FlushD, FlushE, FlushM, FlushW
|
||||
input logic BPWrongE,
|
||||
CSRWriteFenceM,
|
||||
RetM,
|
||||
TrapM,
|
||||
input logic StructuralStallD,
|
||||
input logic LSUStallM,
|
||||
IFUStallF,
|
||||
FetchBufferStallF,
|
||||
input logic FPUStallD,
|
||||
ExternalStall,
|
||||
input logic DivBusyE,
|
||||
FDivBusyE,
|
||||
input logic wfiM,
|
||||
IntPendingM,
|
||||
// Stall & flush outputs
|
||||
output logic StallF, StallFBF,
|
||||
StallD,
|
||||
StallE,
|
||||
StallM,
|
||||
StallW,
|
||||
output logic FlushD,
|
||||
FlushE,
|
||||
FlushM,
|
||||
FlushW
|
||||
);
|
||||
|
||||
logic StallFCause, StallDCause, StallECause, StallMCause, StallWCause;
|
||||
logic LatestUnstalledD, LatestUnstalledE, LatestUnstalledM, LatestUnstalledW;
|
||||
logic FlushDCause, FlushECause, FlushMCause, FlushWCause;
|
||||
logic StallFCause, StallDCause, StallECause, StallMCause, StallWCause;
|
||||
logic LatestUnstalledD, LatestUnstalledE, LatestUnstalledM, LatestUnstalledW;
|
||||
logic FlushDCause, FlushECause, FlushMCause, FlushWCause;
|
||||
|
||||
logic WFIStallM, WFIInterruptedM;
|
||||
|
||||
// WFI logic
|
||||
assign WFIStallM = wfiM & ~IntPendingM; // WFI waiting for an interrupt or timeout
|
||||
assign WFIStallM = wfiM & ~IntPendingM; // WFI waiting for an interrupt or timeout
|
||||
assign WFIInterruptedM = wfiM & IntPendingM; // WFI detects a pending interrupt. Retire WFI; trap if interrupt is enabled.
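As a side note, a tiny hypothetical Python model (illustration only, not part of the RTL) of how these two signals partition the WFI case on IntPendingM:

# Hypothetical model of the WFI conditions above: a WFI with no pending interrupt
# stalls the M stage; a WFI with a pending interrupt is retired (and traps if enabled).
def wfi_conditions(wfiM, IntPendingM):
    WFIStallM = wfiM and not IntPendingM
    WFIInterruptedM = wfiM and IntPendingM
    return WFIStallM, WFIInterruptedM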
|
||||
|
||||
// stalls and flushes
|
||||
|
@ -70,7 +85,7 @@ module hazard (
|
|||
// However, an active division operation resides in the Execute stage, and when the BP mispredicts the divide as a taken branch, the divide must still complete
|
||||
// When a WFI is interrupted and causes a trap, it flushes the rest of the pipeline but not the W stage, because the WFI needs to commit
|
||||
assign FlushDCause = TrapM | RetM | CSRWriteFenceM | BPWrongE;
|
||||
assign FlushECause = TrapM | RetM | CSRWriteFenceM |(BPWrongE & ~(DivBusyE | FDivBusyE));
|
||||
assign FlushECause = TrapM | RetM | CSRWriteFenceM | (BPWrongE & ~(DivBusyE | FDivBusyE));
|
||||
assign FlushMCause = TrapM | RetM | CSRWriteFenceM;
|
||||
assign FlushWCause = TrapM & ~WFIInterruptedM;
|
||||
|
||||
|
@ -82,8 +97,9 @@ module hazard (
|
|||
// The IFU and LSU stall the entire pipeline on a cache miss, bus access, or other long operation.
|
||||
// The IFU stalls the entire pipeline rather than just Fetch to avoid complications with instructions later in the pipeline causing Exceptions
|
||||
// A trap could be asserted at the start of an IFU/LSU stall, and should flush the memory operation
|
||||
assign StallFCause = FetchBufferStallF; // | (IFUStallF & ~FlushDCause);
|
||||
assign StallDCause = (StructuralStallD | FPUStallD) & ~FlushDCause;
|
||||
assign StallFBF = (IFUStallF & ~FlushDCause) | (LSUStallM & ~FlushWCause);
|
||||
assign StallFCause = StallFBF | FetchBufferStallF;
|
||||
assign StallDCause = (StructuralStallD | FPUStallD) & ~FlushDCause; // TODO: add stall if empty fetch buffer
|
||||
assign StallECause = (DivBusyE | FDivBusyE) & ~FlushECause;
|
||||
assign StallMCause = WFIStallM & ~FlushMCause;
|
||||
// Need to gate IFUStallF when the equivalent FlushFCause = FlushDCause = 1.
|
||||
|
@ -93,7 +109,7 @@ module hazard (
|
|||
|
||||
// Stall each stage for cause or if the next stage is stalled
|
||||
// coverage off: StallFCause is always 0
|
||||
assign StallF = StallFCause | StallD;
|
||||
assign StallF = StallFCause;
|
||||
// coverage on
|
||||
assign StallD = StallDCause | StallE;
|
||||
assign StallE = StallECause | StallM;
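For readers following the stall chain, a hypothetical Python sketch of the back-propagation described above (a sketch only; with this change StallF is driven by StallFCause alone rather than chaining from StallD):

# Hypothetical model of stall back-propagation: a stage stalls for its own cause or
# because the next stage is stalled. W has no downstream stage.
def propagate_stalls(cause):
    stall = {}
    stall["W"] = cause["W"]
    stall["M"] = cause["M"] or stall["W"]
    stall["E"] = cause["E"] or stall["M"]
    stall["D"] = cause["D"] or stall["E"]
    stall["F"] = cause["F"] or stall["D"]  # old behavior; the new code uses cause["F"] only
    return stall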
@ -26,42 +26,70 @@
|
|||
// and limitations under the License.
|
||||
////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
module fetchbuffer import cvw::*; #(parameter cvw_t P) (
|
||||
input logic clk, reset,
|
||||
input logic StallF, StallD, FlushD,
|
||||
input logic [31:0] WriteData,
|
||||
output logic [31:0] ReadData,
|
||||
output logic FetchBufferStallF
|
||||
module fetchbuffer
|
||||
import cvw::*;
|
||||
#(
|
||||
parameter cvw_t P,
|
||||
parameter WIDTH = 32
|
||||
) (
|
||||
input logic clk,
|
||||
reset,
|
||||
input logic StallF,
|
||||
StallD,
|
||||
FlushD,
|
||||
input logic [WIDTH-1:0] nop,
|
||||
input logic [WIDTH-1:0] WriteData,
|
||||
output logic [WIDTH-1:0] ReadData,
|
||||
output logic FetchBufferStallF,
|
||||
output logic RisingFBStallF
|
||||
);
|
||||
localparam [31:0] nop = 32'h00000013;
|
||||
logic [31:0] ReadReg [P.FETCHBUFFER_ENTRIES-1:0];
|
||||
logic [31:0] ReadFetchBuffer;
|
||||
logic [P.FETCHBUFFER_ENTRIES-1:0] ReadPtr, WritePtr;
|
||||
logic Empty, Full;
|
||||
logic [WIDTH-1:0] ReadReg [P.FETCHBUFFER_ENTRIES-1:0];
|
||||
logic [WIDTH-1:0] ReadFetchBuffer;
|
||||
logic [P.FETCHBUFFER_ENTRIES-1:0] ReadPtr, WritePtr;
|
||||
logic Empty, Full;
|
||||
|
||||
assign Empty = |(ReadPtr & WritePtr); // Bitwise AND the read and write pointers, then OR the resulting bits together
|
||||
assign Full = |({WritePtr[P.FETCHBUFFER_ENTRIES-2:0], WritePtr[P.FETCHBUFFER_ENTRIES-1]} & ReadPtr); // Same as above but left rotate WritePtr to "add 1"
|
||||
assign FetchBufferStallF = Full;
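A hypothetical Python model (illustration only, not the RTL) of the one-hot pointer scheme used by the Empty/Full checks above:

# One-hot read/write pointers over N = FETCHBUFFER_ENTRIES entries; rotate-left stands in
# for "advance by one". Empty: both pointers select the same entry. Full: the write
# pointer is one rotate ahead of the read pointer.
def rotl(ptr, n):
    return ((ptr << 1) | (ptr >> (n - 1))) & ((1 << n) - 1)

def empty(read_ptr, write_ptr):
    return (read_ptr & write_ptr) != 0

def full(read_ptr, write_ptr, n):
    return (rotl(write_ptr, n) & read_ptr) != 0

# e.g. n=3: empty(0b001, 0b001) -> True; full(0b001, 0b100, 3) -> True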
|
||||
|
||||
flopenl #(32) fbEntries[P.FETCHBUFFER_ENTRIES-1:0] (.clk, .load(reset | FlushD), .en(WritePtr), .d(WriteData), .val(nop), .q(ReadReg));
|
||||
logic [2:0] fbEnable;
|
||||
|
||||
// Fetch buffer entries anded with read ptr for AO Muxing
|
||||
logic [31:0] DaoArr [P.FETCHBUFFER_ENTRIES-1:0];
|
||||
logic fbEnable;
|
||||
logic FetchBufferStallFDelay;
|
||||
assign RisingFBStallF = ~FetchBufferStallFDelay & FetchBufferStallF;
|
||||
|
||||
flop #(1) flop1 (
|
||||
clk,
|
||||
FetchBufferStallF,
|
||||
FetchBufferStallFDelay
|
||||
);
|
||||
assign fbEnable = WritePtr & {3{(~Full | RisingFBStallF)}};
|
||||
flopenl #(WIDTH) fbEntries[P.FETCHBUFFER_ENTRIES-1:0] (
|
||||
.clk,
|
||||
.load(reset | FlushD),
|
||||
.en(fbEnable),
|
||||
.d(WriteData),
|
||||
.val(nop),
|
||||
.q(ReadReg)
|
||||
);
|
||||
|
||||
for (genvar i = 0; i < P.FETCHBUFFER_ENTRIES; i++) begin
|
||||
assign DaoArr[i] = ReadPtr[i] ? ReadReg[i] : '0;
|
||||
end
|
||||
|
||||
or_rows #(P.FETCHBUFFER_ENTRIES, 32) ReadFBAOMux(.a(DaoArr), .y(ReadFetchBuffer));
|
||||
or_rows #(P.FETCHBUFFER_ENTRIES, WIDTH) ReadFBAOMux (
|
||||
.a(DaoArr),
|
||||
.y(ReadFetchBuffer)
|
||||
);
|
||||
|
||||
assign ReadData = Empty ? nop : ReadFetchBuffer;
|
||||
|
||||
always_ff @(posedge clk) begin : shiftRegister
|
||||
if (reset) begin
|
||||
WritePtr <= {{P.FETCHBUFFER_ENTRIES-1{1'b0}}, 1'b1};
|
||||
ReadPtr <= {{P.FETCHBUFFER_ENTRIES-1{1'b0}}, 1'b1};
|
||||
WritePtr <= {{P.FETCHBUFFER_ENTRIES - 1{1'b0}}, 1'b1};
|
||||
ReadPtr <= {{P.FETCHBUFFER_ENTRIES - 1{1'b0}}, 1'b1};
|
||||
end else begin
|
||||
WritePtr <= ~(Full | StallF) ? {WritePtr[P.FETCHBUFFER_ENTRIES-2:0], WritePtr[P.FETCHBUFFER_ENTRIES-1]} : WritePtr;
|
||||
WritePtr <= ~(Full | StallF)? {WritePtr[P.FETCHBUFFER_ENTRIES-2:0], WritePtr[P.FETCHBUFFER_ENTRIES-1]} : WritePtr;
|
||||
ReadPtr <= ~(StallD | Empty) ? {ReadPtr[P.FETCHBUFFER_ENTRIES-2:0], ReadPtr[P.FETCHBUFFER_ENTRIES-1]} : ReadPtr;
|
||||
end
|
||||
end
|
||||
|
|
|
@ -28,7 +28,7 @@
|
|||
|
||||
module ifu import cvw::*; #(parameter cvw_t P) (
|
||||
input logic clk, reset,
|
||||
input logic StallF, StallD, StallE, StallM, StallW,
|
||||
input logic StallF, StallD, StallE, StallM, StallW, StallFBF,
|
||||
input logic FlushD, FlushE, FlushM, FlushW,
|
||||
output logic IFUStallF, // IFU stalls pipeline during a multicycle operation
|
||||
// Command from CPU
|
||||
|
@ -303,11 +303,17 @@ module ifu import cvw::*; #(parameter cvw_t P) (
|
|||
assign IFUStallF = IFUCacheBusStallF | SelSpillNextF;
|
||||
assign GatedStallD = StallD & ~SelSpillNextF;
|
||||
|
||||
logic NoStallPCF;
|
||||
if (P.FETCHBUFFER_ENTRIES != 0) begin : fetchbuffer
|
||||
fetchbuffer #(P) fetchbuff(.clk, .reset, .StallF, .StallD, .FlushD, .WriteData(PostSpillInstrRawF), .ReadData(InstrRawD), .FetchBufferStallF);
|
||||
fetchbuffer #(P) fetchbuff(.clk, .reset, .StallF, .StallD, .FlushD, .nop, .WriteData(PostSpillInstrRawF), .ReadData(InstrRawD), .FetchBufferStallF, .RisingFBStallF());
|
||||
logic PCFetchBufferStallD, FetchBufferStallFDelay;
|
||||
flop #(1) flop1 (clk, FetchBufferStallF, FetchBufferStallFDelay);
|
||||
assign NoStallPCF = ~FetchBufferStallFDelay & FetchBufferStallF;
|
||||
fetchbuffer #(P, P.XLEN) PCFetchBuffer(.clk, .reset, .StallF, .StallD, .FlushD, .nop({{1'b1},{(P.XLEN-1){1'b0}}}), .WriteData(PCF), .ReadData(PCD), .FetchBufferStallF(PCFetchBufferStallD), .RisingFBStallF());
|
||||
end else begin
|
||||
flopenl #(32) AlignedInstrRawDFlop(clk, reset | FlushD, ~StallD, PostSpillInstrRawF, nop, InstrRawD);
|
||||
assign FetchBufferStallF = '0;
|
||||
flopenrc #(P.XLEN) PCDReg(clk, reset, FlushD, ~StallD, PCF, PCD);
|
||||
end
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
@ -320,7 +326,10 @@ module ifu import cvw::*; #(parameter cvw_t P) (
|
|||
|
||||
mux3 #(P.XLEN) pcmux3(PC2NextF, EPCM, TrapVectorM, {TrapM, RetM}, UnalignedPCNextF);
|
||||
mux2 #(P.XLEN) pcresetmux({UnalignedPCNextF[P.XLEN-1:1], 1'b0}, P.RESET_VECTOR[P.XLEN-1:0], reset, PCNextF);
|
||||
flopen #(P.XLEN) pcreg(clk, ~StallF | reset, PCNextF, PCF);
|
||||
logic PCEnable;
|
||||
assign PCEnable = ~StallF | reset | NoStallPCF;
|
||||
// assign PCEnable = ~StallF | reset;
|
||||
flopen #(P.XLEN) pcreg(clk, PCEnable, PCNextF, PCF); //* make this NoStallPCF
|
||||
|
||||
// pcadder
|
||||
// add 2 or 4 to the PC, based on whether the instruction is 16 bits or 32
|
||||
|
@ -375,7 +384,6 @@ module ifu import cvw::*; #(parameter cvw_t P) (
|
|||
////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
// Decode stage pipeline register and logic
|
||||
flopenrc #(P.XLEN) PCDReg(clk, reset, FlushD, ~StallD, PCF, PCD);
|
||||
|
||||
// expand 16-bit compressed instructions to 32 bits
|
||||
if (P.ZCA_SUPPORTED) begin: decomp
|
||||
|
|
|
@ -48,7 +48,7 @@ module wallypipelinedcore import cvw::*; #(parameter cvw_t P) (
|
|||
input logic ExternalStall
|
||||
);
|
||||
|
||||
logic StallF, StallD, StallE, StallM, StallW;
|
||||
logic StallF, StallD, StallE, StallM, StallW, StallFBF;
|
||||
logic FlushD, FlushE, FlushM, FlushW;
|
||||
logic TrapM, RetM;
|
||||
|
||||
|
@ -175,7 +175,7 @@ module wallypipelinedcore import cvw::*; #(parameter cvw_t P) (
|
|||
|
||||
// instruction fetch unit: PC, branch prediction, instruction cache
|
||||
ifu #(P) ifu(.clk, .reset,
|
||||
.StallF, .StallD, .StallE, .StallM, .StallW, .FlushD, .FlushE, .FlushM, .FlushW,
|
||||
.StallF, .StallD, .StallE, .StallM, .StallW, .StallFBF, .FlushD, .FlushE, .FlushM, .FlushW,
|
||||
.InstrValidE, .InstrValidD,
|
||||
.BranchD, .BranchE, .JumpD, .JumpE, .ICacheStallF,
|
||||
// Fetch
|
||||
|
@ -282,7 +282,7 @@ module wallypipelinedcore import cvw::*; #(parameter cvw_t P) (
|
|||
.DivBusyE, .FDivBusyE,
|
||||
.wfiM, .IntPendingM,
|
||||
// Stall & flush outputs
|
||||
.StallF, .StallD, .StallE, .StallM, .StallW,
|
||||
.StallF, .StallD, .StallE, .StallM, .StallW, .StallFBF,
|
||||
.FlushD, .FlushE, .FlushM, .FlushW);
|
||||
|
||||
// privileged unit
|
||||
|
|
|
@ -27,50 +27,46 @@ def synthsintocsv():
|
|||
specReg = re.compile('[a-zA-Z0-9]+')
|
||||
metricReg = re.compile('-?\d+\.\d+[e]?[-+]?\d*')
|
||||
|
||||
file = open("Summary.csv", "w")
|
||||
writer = csv.writer(file)
|
||||
writer.writerow(['Width', 'Config', 'Mod', 'Tech', 'Target Freq', 'Delay', 'Area'])
|
||||
with open("Summary.csv", "w") as file:
|
||||
writer = csv.writer(file)
|
||||
writer.writerow(['Width', 'Config', 'Mod', 'Tech', 'Target Freq', 'Delay', 'Area'])
|
||||
|
||||
for oneSynth in allSynths:
|
||||
descrip = specReg.findall(oneSynth)
|
||||
# print("From " + oneSynth + " Find ")
|
||||
# for d in descrip:
|
||||
# print(d)
|
||||
base = 4 if descrip[3] == "sram" else 3
|
||||
width = descrip[base][:4]
|
||||
config = descrip[base][4:]
|
||||
if descrip[base+1][-2:] == 'nm':
|
||||
mod = ''
|
||||
else:
|
||||
mod = descrip[base+1]
|
||||
descrip = descrip[1:]
|
||||
tech = descrip[base+1][:-2]
|
||||
freq = descrip[base+2]
|
||||
# print(width, config, mod, tech, freq)
|
||||
metrics = []
|
||||
for phrase in ['Path Slack', 'Design Area']:
|
||||
bashCommand = 'grep "{}" '+ oneSynth[2:]+'/reports/*qor*'
|
||||
bashCommand = bashCommand.format(phrase)
|
||||
# print(bashCommand)
|
||||
try:
|
||||
output = subprocess.check_output(['bash','-c', bashCommand])
|
||||
nums = metricReg.findall(str(output))
|
||||
nums = [float(m) for m in nums]
|
||||
metrics += nums
|
||||
except:
|
||||
print(width + config + tech + '_' + freq + " doesn't have reports")
|
||||
if metrics == []:
|
||||
pass
|
||||
else:
|
||||
delay = 1000/int(freq) - metrics[0]
|
||||
area = metrics[1]
|
||||
writer.writerow([width, config, mod, tech, freq, delay, area])
|
||||
|
||||
for oneSynth in allSynths:
|
||||
descrip = specReg.findall(oneSynth)
|
||||
# print("From " + oneSynth + " Find ")
|
||||
# for d in descrip:
|
||||
# print(d)
|
||||
if (descrip[3] == "sram"):
|
||||
base = 4
|
||||
else:
|
||||
base = 3
|
||||
width = descrip[base][:4]
|
||||
config = descrip[base][4:]
|
||||
if descrip[base+1][-2:] == 'nm':
|
||||
mod = ''
|
||||
else:
|
||||
mod = descrip[base+1]
|
||||
descrip = descrip[1:]
|
||||
tech = descrip[base+1][:-2]
|
||||
freq = descrip[base+2]
|
||||
# print(width, config, mod, tech, freq)
|
||||
metrics = []
|
||||
for phrase in ['Path Slack', 'Design Area']:
|
||||
bashCommand = 'grep "{}" '+ oneSynth[2:]+'/reports/*qor*'
|
||||
bashCommand = bashCommand.format(phrase)
|
||||
# print(bashCommand)
|
||||
try:
|
||||
output = subprocess.check_output(['bash','-c', bashCommand])
|
||||
nums = metricReg.findall(str(output))
|
||||
nums = [float(m) for m in nums]
|
||||
metrics += nums
|
||||
except:
|
||||
print(width + config + tech + '_' + freq + " doesn't have reports")
|
||||
if metrics == []:
|
||||
pass
|
||||
else:
|
||||
delay = 1000/int(freq) - metrics[0]
|
||||
area = metrics[1]
|
||||
writer.writerow([width, config, mod, tech, freq, delay, area])
|
||||
file.close()
|
||||
|
||||
|
||||
def synthsfromcsv(filename):
|
||||
Synth = namedtuple("Synth", "width config mod tech freq delay area")
|
||||
with open(filename, newline='') as csvfile:
|
||||
|
@ -93,7 +89,7 @@ def freqPlot(tech, width, config):
|
|||
|
||||
freqsL, delaysL, areasL = ([[], []] for i in range(3))
|
||||
for oneSynth in allSynths:
|
||||
if (width == oneSynth.width) & (config == oneSynth.config) & (tech == oneSynth.tech) & ('orig' == oneSynth.mod):
|
||||
if (width == oneSynth.width) & (config == oneSynth.config) & (tech == oneSynth.tech) & (oneSynth.mod == 'orig'):
|
||||
ind = (1000/oneSynth.delay < (0.95*oneSynth.freq)) # when delay is within target clock period
|
||||
freqsL[ind] += [oneSynth.freq]
|
||||
delaysL[ind] += [oneSynth.delay]
|
||||
|
@ -101,10 +97,7 @@ def freqPlot(tech, width, config):
|
|||
|
||||
fig, (ax1, ax2) = plt.subplots(2, 1, sharex=True)
|
||||
allFreqs = list(flatten(freqsL))
|
||||
if allFreqs != []:
|
||||
median = np.median(allFreqs)
|
||||
else:
|
||||
median = 0
|
||||
median = np.median(allFreqs) if allFreqs != [] else 0
|
||||
|
||||
for ind in [0,1]:
|
||||
areas = areasL[ind]
|
||||
|
@ -169,11 +162,10 @@ def plotFeatures(tech, width, config):
|
|||
delays, areas, labels = ([] for i in range(3))
|
||||
freq = techdict[tech].targfreq
|
||||
for oneSynth in allSynths:
|
||||
if (tech == oneSynth.tech) & (freq == oneSynth.freq):
|
||||
if (oneSynth.config == config) & (width == oneSynth.width):
|
||||
delays += [oneSynth.delay]
|
||||
areas += [oneSynth.area]
|
||||
labels += [oneSynth.mod]
|
||||
if (tech == oneSynth.tech) & (freq == oneSynth.freq) & (oneSynth.config == config) & (width == oneSynth.width):
|
||||
delays += [oneSynth.delay]
|
||||
areas += [oneSynth.area]
|
||||
labels += [oneSynth.mod]
|
||||
|
||||
if (delays == []):
|
||||
print("No delays found for tech ", tech, " freq ", freq, ". Did you set --sky130freq, --sky90freq and --tsmcfreq?\n")
|
||||
|
@ -186,7 +178,7 @@ def plotFeatures(tech, width, config):
|
|||
plt.title(titlestr)
|
||||
plt.savefig(final_directory + '/features_'+titlestr+'.png')
|
||||
|
||||
|
||||
|
||||
def plotConfigs(tech, mod=''):
|
||||
delays, areas, labels = ([] for i in range(3))
|
||||
freq = techdict[tech].targfreq
|
||||
|
@ -227,7 +219,7 @@ def normAreaDelay(mod=''):
|
|||
ax.legend(handles = fullLeg, loc='upper left')
|
||||
plt.savefig(final_directory + '/normAreaDelay.png')
|
||||
|
||||
|
||||
|
||||
def addFO4axis(fig, ax, tech):
|
||||
fo4 = techdict[tech].fo4
|
||||
|
||||
|
@ -282,4 +274,4 @@ if __name__ == '__main__':
|
|||
plotConfigs('sky130', mod='orig')
|
||||
plotConfigs('tsmc28psyn', mod='orig')
|
||||
normAreaDelay(mod='orig')
|
||||
os.system("./extractArea.pl");
|
||||
os.system("./extractArea.pl")
|
||||
|
|
|
@ -50,49 +50,48 @@ def synthsintocsv():
|
|||
specReg = re.compile("[a-zA-Z0-9]+")
|
||||
metricReg = re.compile("-?\d+\.\d+[e]?[-+]?\d*")
|
||||
|
||||
file = open("ppaData.csv", "w")
|
||||
writer = csv.writer(file)
|
||||
writer.writerow(
|
||||
[
|
||||
"Module",
|
||||
"Tech",
|
||||
"Width",
|
||||
"Target Freq",
|
||||
"Delay",
|
||||
"Area",
|
||||
"L Power (nW)",
|
||||
"D energy (nJ)",
|
||||
]
|
||||
)
|
||||
with open("ppaData.csv", "w") as file:
|
||||
writer = csv.writer(file)
|
||||
writer.writerow(
|
||||
[
|
||||
"Module",
|
||||
"Tech",
|
||||
"Width",
|
||||
"Target Freq",
|
||||
"Delay",
|
||||
"Area",
|
||||
"L Power (nW)",
|
||||
"D energy (nJ)",
|
||||
]
|
||||
)
|
||||
|
||||
for oneSynth in allSynths:
|
||||
module, width, risc, tech, freq = specReg.findall(oneSynth)[1:6]
|
||||
tech = tech[:-2]
|
||||
metrics = []
|
||||
for phrase in [["Path Slack", "qor"], ["Design Area", "qor"], ["100", "power"]]:
|
||||
bashCommand = 'grep "{}" ' + oneSynth[2:] + "/reports/*{}*"
|
||||
bashCommand = bashCommand.format(*phrase)
|
||||
try:
|
||||
output = subprocess.check_output(["bash", "-c", bashCommand])
|
||||
except:
|
||||
print(module + width + tech + freq + " doesn't have reports")
|
||||
print("Consider running cleanup() first")
|
||||
nums = metricReg.findall(str(output))
|
||||
nums = [float(m) for m in nums]
|
||||
metrics += nums
|
||||
delay = 1000 / int(freq) - metrics[0]
|
||||
area = metrics[1]
|
||||
lpower = metrics[4]
|
||||
tpower = (metrics[2] + metrics[3] + metrics[4]*.000001)
|
||||
denergy = (
|
||||
(tpower) / int(freq) * 1000
|
||||
) # (switching + internal powers)*delay, more practical units for regression coefs
|
||||
for oneSynth in allSynths:
|
||||
module, width, risc, tech, freq = specReg.findall(oneSynth)[1:6]
|
||||
tech = tech[:-2]
|
||||
metrics = []
|
||||
for phrase in [["Path Slack", "qor"], ["Design Area", "qor"], ["100", "power"]]:
|
||||
bashCommand = 'grep "{}" ' + oneSynth[2:] + "/reports/*{}*"
|
||||
bashCommand = bashCommand.format(*phrase)
|
||||
try:
|
||||
output = subprocess.check_output(["bash", "-c", bashCommand])
|
||||
except:
|
||||
print(module + width + tech + freq + " doesn't have reports")
|
||||
print("Consider running cleanup() first")
|
||||
nums = metricReg.findall(str(output))
|
||||
nums = [float(m) for m in nums]
|
||||
metrics += nums
|
||||
delay = 1000 / int(freq) - metrics[0]
|
||||
area = metrics[1]
|
||||
lpower = metrics[4]
|
||||
tpower = (metrics[2] + metrics[3] + metrics[4]*.000001)
|
||||
denergy = (
|
||||
(tpower) / int(freq) * 1000
|
||||
) # (switching + internal powers)*delay, more practical units for regression coefs
|
||||
|
||||
if "flop" in module: # since two flops in each module
|
||||
[area, lpower, denergy] = [n / 2 for n in [area, lpower, denergy]]
|
||||
if "flop" in module: # since two flops in each module
|
||||
[area, lpower, denergy] = [n / 2 for n in [area, lpower, denergy]]
|
||||
|
||||
writer.writerow([module, tech, width, freq, delay, area, lpower, denergy])
|
||||
file.close()
|
||||
writer.writerow([module, tech, width, freq, delay, area, lpower, denergy])
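For concreteness, a short illustration of the delay and energy arithmetic above with made-up numbers (the script's own unit mix is kept as-is):

# Made-up example: a 100 MHz target gives a 10 ns period, so 2.5 ns of reported slack
# means a 7.5 ns achieved delay; energy per cycle is total power divided by frequency,
# scaled the same way as in the script.
freq = 100                              # MHz
slack = 2.5                             # ns of path slack from the qor report
delay = 1000 / freq - slack             # 7.5 ns
switching, internal, leakage_nw = 0.8, 1.2, 500.0
tpower = switching + internal + leakage_nw * .000001
denergy = tpower / freq * 1000          # same scaling as the script's regression units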
|
||||
|
||||
|
||||
def cleanup():
|
||||
|
@ -129,15 +128,12 @@ def getVals(tech, module, var, freq=None, width=None):
|
|||
works at a specified target frequency or, if none is given, uses the synthesis with the best achievable delay for each width
|
||||
"""
|
||||
|
||||
if width != None:
|
||||
widthsToGet = width
|
||||
else:
|
||||
widthsToGet = widths
|
||||
widthsToGet = width if width is not None else widths
|
||||
|
||||
metric = []
|
||||
widthL = []
|
||||
|
||||
if freq != None:
|
||||
if freq is not None:
|
||||
for oneSynth in allSynths:
|
||||
if (
|
||||
(oneSynth.freq == freq)
|
||||
|
@ -171,37 +167,30 @@ def csvOfBest(filename):
|
|||
m = np.Inf # large number to start
|
||||
best = None
|
||||
for oneSynth in allSynths: # best achievable, rightmost green
|
||||
if (
|
||||
(oneSynth.width == w)
|
||||
& (oneSynth.tech == tech)
|
||||
& (oneSynth.module == mod)
|
||||
):
|
||||
if (oneSynth.delay < m) & (
|
||||
1000 / oneSynth.delay > oneSynth.freq
|
||||
):
|
||||
if (oneSynth.width == w) & (oneSynth.tech == tech) & (oneSynth.module == mod):
|
||||
if (oneSynth.delay < m) & (1000 / oneSynth.delay > oneSynth.freq):
|
||||
m = oneSynth.delay
|
||||
best = oneSynth
|
||||
|
||||
if (best != None) & (best not in bestSynths):
|
||||
if (best is not None) & (best not in bestSynths):
|
||||
bestSynths += [best]
|
||||
|
||||
file = open(filename, "w")
|
||||
writer = csv.writer(file)
|
||||
writer.writerow(
|
||||
[
|
||||
"Module",
|
||||
"Tech",
|
||||
"Width",
|
||||
"Target Freq",
|
||||
"Delay",
|
||||
"Area",
|
||||
"L Power (nW)",
|
||||
"D energy (nJ)",
|
||||
]
|
||||
)
|
||||
for synth in bestSynths:
|
||||
writer.writerow(list(synth))
|
||||
file.close()
|
||||
with open(filename, "w") as file:
|
||||
writer = csv.writer(file)
|
||||
writer.writerow(
|
||||
[
|
||||
"Module",
|
||||
"Tech",
|
||||
"Width",
|
||||
"Target Freq",
|
||||
"Delay",
|
||||
"Area",
|
||||
"L Power (nW)",
|
||||
"D energy (nJ)",
|
||||
]
|
||||
)
|
||||
for synth in bestSynths:
|
||||
writer.writerow(list(synth))
|
||||
return bestSynths
|
||||
|
||||
|
||||
|
@ -229,7 +218,7 @@ def genLegend(fits, coefs, r2=None, spec=None, ale=False):
|
|||
eq = ""
|
||||
ind = 0
|
||||
|
||||
for k in eqDict.keys():
|
||||
for k in eqDict:
|
||||
if k in fits:
|
||||
if str(coefsr[ind]) != "0":
|
||||
eq += " + " + coefsr[ind] + eqDict[k]
|
||||
|
@ -237,7 +226,7 @@ def genLegend(fits, coefs, r2=None, spec=None, ale=False):
|
|||
|
||||
eq = eq[3:] # chop off leading ' + '
|
||||
|
||||
if (r2 == None) or (spec == None):
|
||||
if (r2 is None) or (spec is None):
|
||||
return eq
|
||||
else:
|
||||
legend_elements = [lines.Line2D([0], [0], color=spec.color, label=eq)]
|
||||
|
@ -277,10 +266,7 @@ def oneMetricPlot(
|
|||
modFit = fitDict[module]
|
||||
fits = modFit[ale]
|
||||
|
||||
if freq:
|
||||
ls = "--"
|
||||
else:
|
||||
ls = "-"
|
||||
ls = "--" if freq else "-"
|
||||
|
||||
for spec in techSpecs:
|
||||
# print(f"Searching for module of spec {spec} and module {module} and var {var}")
|
||||
|
@ -339,7 +325,7 @@ def oneMetricPlot(
|
|||
ax.add_artist(ax.legend(handles=fullLeg, loc=legLoc))
|
||||
titleStr = (
|
||||
" (target " + str(freq) + "MHz)"
|
||||
if freq != None
|
||||
if freq is not None
|
||||
else " (best achievable delay)"
|
||||
)
|
||||
ax.set_title(module + titleStr)
|
||||
|
@ -403,72 +389,16 @@ def makeCoefTable():
|
|||
"""writes CSV with each line containing the coefficients for a regression fit
|
||||
to a particular combination of module, metric (including both techs, normalized)
|
||||
"""
|
||||
file = open("ppaFitting.csv", "w")
|
||||
writer = csv.writer(file)
|
||||
writer.writerow(
|
||||
["Module", "Metric", "Target", "1", "N", "N^2", "log2(N)", "Nlog2(N)", "R^2"]
|
||||
)
|
||||
with open("ppaFitting.csv", "w") as file:
|
||||
writer = csv.writer(file)
|
||||
writer.writerow(
|
||||
["Module", "Metric", "Target", "1", "N", "N^2", "log2(N)", "Nlog2(N)", "R^2"]
|
||||
)
|
||||
|
||||
for module in modules:
|
||||
for freq in [10, None]:
|
||||
target = "easy" if freq else "hard"
|
||||
for var in ["delay", "area", "lpower", "denergy"]:
|
||||
ale = var != "delay"
|
||||
metL = []
|
||||
modFit = fitDict[module]
|
||||
fits = modFit[ale]
|
||||
|
||||
for spec in techSpecs:
|
||||
metric = getVals(spec.tech, module, var, freq=freq)
|
||||
techdict = spec._asdict()
|
||||
norm = techdict[var]
|
||||
metL += [m / norm for m in metric]
|
||||
|
||||
xp, pred, coefs, r2 = regress(widths * 2, metL, fits, ale)
|
||||
coefs = np.ndarray.tolist(coefs)
|
||||
coefsToWrite = [None] * 5
|
||||
fitTerms = "clsgn"
|
||||
ind = 0
|
||||
for i in range(len(fitTerms)):
|
||||
if fitTerms[i] in fits:
|
||||
coefsToWrite[i] = coefs[ind]
|
||||
ind += 1
|
||||
row = [module, var, target] + coefsToWrite + [r2]
|
||||
writer.writerow(row)
|
||||
|
||||
file.close()
|
||||
|
||||
|
||||
def sigfig(num, figs):
|
||||
return "{:g}".format(float("{:.{p}g}".format(num, p=figs)))
|
||||
|
||||
|
||||
def makeEqTable():
|
||||
"""writes CSV with each line containing the equations for fits for each metric
|
||||
to a particular module (including both techs, normalized)
|
||||
"""
|
||||
file = open("ppaEquations.csv", "w")
|
||||
writer = csv.writer(file)
|
||||
writer.writerow(
|
||||
[
|
||||
"Element",
|
||||
"Best delay",
|
||||
"Fast area",
|
||||
"Fast leakage",
|
||||
"Fast energy",
|
||||
"Small area",
|
||||
"Small leakage",
|
||||
"Small energy",
|
||||
]
|
||||
)
|
||||
|
||||
for module in modules:
|
||||
eqs = []
|
||||
for freq in [None, 10]:
|
||||
for var in ["delay", "area", "lpower", "denergy"]:
|
||||
if (var == "delay") and (freq == 10):
|
||||
pass
|
||||
else:
|
||||
for module in modules:
|
||||
for freq in [10, None]:
|
||||
target = "easy" if freq else "hard"
|
||||
for var in ["delay", "area", "lpower", "denergy"]:
|
||||
ale = var != "delay"
|
||||
metL = []
|
||||
modFit = fitDict[module]
|
||||
|
@ -482,12 +412,63 @@ def makeEqTable():
|
|||
|
||||
xp, pred, coefs, r2 = regress(widths * 2, metL, fits, ale)
|
||||
coefs = np.ndarray.tolist(coefs)
|
||||
eqs += [genLegend(fits, coefs, ale=ale)]
|
||||
row = [module] + eqs
|
||||
writer.writerow(row)
|
||||
coefsToWrite = [None] * 5
|
||||
fitTerms = "clsgn"
|
||||
ind = 0
|
||||
for i in range(len(fitTerms)):
|
||||
if fitTerms[i] in fits:
|
||||
coefsToWrite[i] = coefs[ind]
|
||||
ind += 1
|
||||
row = [module, var, target] + coefsToWrite + [r2]
|
||||
writer.writerow(row)
|
||||
|
||||
file.close()
|
||||
|
||||
def sigfig(num, figs):
|
||||
return "{:g}".format(float("{:.{p}g}".format(num, p=figs)))
|
||||
|
||||
|
||||
def makeEqTable():
|
||||
"""writes CSV with each line containing the equations for fits for each metric
|
||||
to a particular module (including both techs, normalized)
|
||||
"""
|
||||
with open("ppaEquations.csv", "w") as file:
|
||||
writer = csv.writer(file)
|
||||
writer.writerow(
|
||||
[
|
||||
"Element",
|
||||
"Best delay",
|
||||
"Fast area",
|
||||
"Fast leakage",
|
||||
"Fast energy",
|
||||
"Small area",
|
||||
"Small leakage",
|
||||
"Small energy",
|
||||
]
|
||||
)
|
||||
|
||||
for module in modules:
|
||||
eqs = []
|
||||
for freq in [None, 10]:
|
||||
for var in ["delay", "area", "lpower", "denergy"]:
|
||||
if (var == "delay") and (freq == 10):
|
||||
pass
|
||||
else:
|
||||
ale = var != "delay"
|
||||
metL = []
|
||||
modFit = fitDict[module]
|
||||
fits = modFit[ale]
|
||||
|
||||
for spec in techSpecs:
|
||||
metric = getVals(spec.tech, module, var, freq=freq)
|
||||
techdict = spec._asdict()
|
||||
norm = techdict[var]
|
||||
metL += [m / norm for m in metric]
|
||||
|
||||
xp, pred, coefs, r2 = regress(widths * 2, metL, fits, ale)
|
||||
coefs = np.ndarray.tolist(coefs)
|
||||
eqs += [genLegend(fits, coefs, ale=ale)]
|
||||
row = [module] + eqs
|
||||
writer.writerow(row)
|
||||
|
||||
def genFuncs(fits="clsgn"):
|
||||
"""helper function for regress()
|
||||
|
@ -719,7 +700,7 @@ def plotPPA(mod, freq=None, norm=True, aleOpt=False):
|
|||
else:
|
||||
axs[i, j].legend(handles=leg, handlelength=1.5)
|
||||
|
||||
titleStr = " (target " + str(freq) + "MHz)" if freq != None else ""
|
||||
titleStr = f" (target {freq} MHz)" if freq is not None else ""
|
||||
plt.suptitle(mod + titleStr)
|
||||
plt.tight_layout(pad=0.05, w_pad=1, h_pad=0.5, rect=(0, 0, 1, 0.97))
|
||||
|
||||
|
@ -819,10 +800,7 @@ def stdDevError():
|
|||
norm = techdict[var]
|
||||
metL += [m / norm for m in metric]
|
||||
|
||||
if ale:
|
||||
ws = [w / normAddWidth for w in widths]
|
||||
else:
|
||||
ws = widths
|
||||
ws = [w / normAddWidth for w in widths] if ale else widths
|
||||
ws = ws * 2
|
||||
mat = []
|
||||
for w in ws:
|
||||
|
@ -896,7 +874,7 @@ if __name__ == "__main__":
|
|||
"flop": ["c", "l", "l"],
|
||||
"binencoder": ["cg", "l", "l"],
|
||||
}
|
||||
fitDict.update(dict.fromkeys(["mux2", "mux4", "mux8"], ["cg", "l", "l"]))
|
||||
fitDict.update({key: ["cg", "l", "l"] for key in ["mux2", "mux4", "mux8"]})
|
||||
|
||||
TechSpec = namedtuple("TechSpec", "tech color shape delay area lpower denergy")
|
||||
# FO4 delay information information
|
||||
|
|
|
@ -11,7 +11,7 @@ from multiprocessing import Pool
|
|||
from ppaAnalyze import synthsfromcsv
|
||||
|
||||
def runCommand(module, width, tech, freq):
|
||||
command = "make synth DESIGN={} WIDTH={} TECH={} DRIVE=INV FREQ={} MAXOPT=1 MAXCORES=1".format(module, width, tech, freq)
|
||||
command = f"make synth DESIGN={module} WIDTH={width} TECH={tech} DRIVE=INV FREQ={freq} MAXOPT=1 MAXCORES=1"
|
||||
subprocess.call(command, shell=True)
|
||||
|
||||
def deleteRedundant(synthsToRun):
|
||||
|
@ -19,7 +19,7 @@ def deleteRedundant(synthsToRun):
|
|||
synthStr = "rm -rf runs/{}_{}_rv32e_{}_{}_*"
|
||||
for synth in synthsToRun:
|
||||
bashCommand = synthStr.format(*synth)
|
||||
outputCPL = subprocess.check_output(['bash','-c', bashCommand])
|
||||
subprocess.check_output(['bash','-c', bashCommand])
|
||||
|
||||
def freqSweep(module, width, tech):
|
||||
synthsToRun = []
|
||||
|
@ -71,29 +71,29 @@ def allCombos(widths, modules, techs, freqs):
|
|||
if __name__ == '__main__':
|
||||
|
||||
##### Run specific syntheses for a specific frequency
|
||||
widths = [8, 16, 32, 64, 128]
|
||||
modules = ['mul', 'adder', 'shifter', 'flop', 'comparator', 'binencoder', 'csa', 'mux2', 'mux4', 'mux8']
|
||||
techs = ['sky90', 'sky130', 'tsmc28', 'tsmc28psyn']
|
||||
freqs = [5000]
|
||||
synthsToRun = allCombos(widths, modules, techs, freqs)
|
||||
widths = [8, 16, 32, 64, 128]
|
||||
modules = ['mul', 'adder', 'shifter', 'flop', 'comparator', 'binencoder', 'csa', 'mux2', 'mux4', 'mux8']
|
||||
techs = ['sky90', 'sky130', 'tsmc28', 'tsmc28psyn']
|
||||
freqs = [5000]
|
||||
synthsToRun = allCombos(widths, modules, techs, freqs)
|
||||
|
||||
##### Run a sweep based on best delay found in existing syntheses
|
||||
module = 'adder'
|
||||
width = 32
|
||||
tech = 'tsmc28psyn'
|
||||
synthsToRun = freqSweep(module, width, tech)
|
||||
module = 'adder'
|
||||
width = 32
|
||||
tech = 'tsmc28psyn'
|
||||
synthsToRun = freqSweep(module, width, tech)
|
||||
|
||||
##### Run a sweep for multiple modules/widths based on best delay found in existing syntheses
|
||||
modules = ['adder']
|
||||
modules = ['adder']
|
||||
# widths = [8, 16, 32, 64, 128]
|
||||
widths = [32]
|
||||
tech = 'sky130'
|
||||
synthsToRun = freqModuleSweep(widths, modules, tech)
|
||||
widths = [32]
|
||||
tech = 'sky130'
|
||||
synthsToRun = freqModuleSweep(widths, modules, tech)
|
||||
|
||||
##### Only do syntheses for which a run doesn't already exist
|
||||
synthsToRun = filterRedundant(synthsToRun)
|
||||
pool = Pool(processes=25)
|
||||
synthsToRun = filterRedundant(synthsToRun)
|
||||
pool = Pool(processes=25)
|
||||
|
||||
pool.starmap(runCommand, synthsToRun)
|
||||
pool.close()
|
||||
pool.join()
|
||||
pool.join()
|
||||
|
|
|
@ -15,59 +15,52 @@ import os
|
|||
parser = argparse.ArgumentParser()
|
||||
|
||||
parser.add_argument("DESIGN")
|
||||
parser.add_argument("HDLPATH");
|
||||
parser.add_argument("HDLPATH")
|
||||
|
||||
args=parser.parse_args()
|
||||
|
||||
fin_path = glob.glob(f"{os.getenv('WALLY')}/src/**/{args.DESIGN}.sv",recursive=True)[0]
|
||||
|
||||
fin = open(fin_path, "r", encoding='utf-8')
|
||||
with open(fin_path) as fin:
|
||||
lines = fin.readlines()
|
||||
|
||||
lines = fin.readlines()
|
||||
# keeps track of what line number the module header begins
|
||||
lineModuleStart = 0
|
||||
|
||||
# keeps track of what line number the module header begins
|
||||
lineModuleStart = 0
|
||||
# keeps track of what line number the module header ends
|
||||
lineModuleEnd = 0
|
||||
|
||||
# keeps track of what line number the module header ends
|
||||
lineModuleEnd = 0
|
||||
# keeps track of module name
|
||||
moduleName = ""
|
||||
|
||||
# keeps track of module name
|
||||
moduleName = ""
|
||||
# string that will keep track of the running module header
|
||||
buf = 'import cvw::*;\n`include "config.vh"\n`include "parameter-defs.vh"\n'
|
||||
|
||||
# string that will keep track of the running module header
|
||||
buf = "import cvw::*;\n`include \"config.vh\"\n`include \"parameter-defs.vh\"\n"
|
||||
# are we writing into the buffer
|
||||
writeBuf=False
|
||||
|
||||
# are we writing into the buffer
|
||||
writeBuf=False
|
||||
index=0
|
||||
|
||||
index=0
|
||||
# string copy logic
|
||||
for l in lines:
|
||||
if l.lstrip().find("module") == 0:
|
||||
lineModuleStart = index
|
||||
moduleName = l.split()[1]
|
||||
writeBuf = True
|
||||
buf += f"module {moduleName}wrapper (\n"
|
||||
continue
|
||||
if (writeBuf):
|
||||
buf += l
|
||||
if l.lstrip().find (");") == 0:
|
||||
lineModuleEnd = index
|
||||
break
|
||||
index+=1
|
||||
|
||||
# string copy logic
|
||||
for l in lines:
|
||||
if l.lstrip().find("module") == 0:
|
||||
lineModuleStart = index
|
||||
moduleName = l.split()[1]
|
||||
writeBuf = True
|
||||
buf += f"module {moduleName}wrapper (\n"
|
||||
continue
|
||||
if (writeBuf):
|
||||
buf += l
|
||||
if l.lstrip().find (");") == 0:
|
||||
lineModuleEnd = index
|
||||
break
|
||||
index+=1
|
||||
# post-processing buffer: add DUT and endmodule lines
|
||||
buf += f"\t{moduleName} #(P) dut(.*);\nendmodule"
|
||||
|
||||
# post-processing buffer: add DUT and endmodule lines
|
||||
buf += f"\t{moduleName} #(P) dut(.*);\nendmodule"
|
||||
# path to wrapper
|
||||
wrapperPath = f"{args.HDLPATH}/{moduleName}wrapper.sv"
|
||||
|
||||
# path to wrapper
|
||||
wrapperPath = f"{args.HDLPATH}/{moduleName}wrapper.sv"
|
||||
|
||||
fout = open(wrapperPath, "w")
|
||||
|
||||
fout.write(buf)
|
||||
|
||||
fin.close()
|
||||
fout.close()
|
||||
|
||||
#print(buf)
|
||||
with open(wrapperPath, "w") as fout:
|
||||
fout.write(buf)
|
||||
|
|
|
@ -7,12 +7,9 @@ import argparse
|
|||
|
||||
def runSynth(config, mod, tech, freq, maxopt, usesram):
|
||||
global pool
|
||||
if (usesram):
|
||||
prefix = "syn_sram_"
|
||||
else:
|
||||
prefix = "syn_"
|
||||
prefix = "syn_sram_" if usesram else "syn_"
|
||||
cfg = prefix + config
|
||||
command = "make synth DESIGN=wallypipelinedcore CONFIG={} MOD={} TECH={} DRIVE=FLOP FREQ={} MAXOPT={} USESRAM={} MAXCORES=1".format(cfg, mod, tech, freq, maxopt, usesram)
|
||||
command = f"make synth DESIGN=wallypipelinedcore CONFIG={cfg} MOD={mod} TECH={tech} DRIVE=FLOP FREQ={freq} MAXOPT={maxopt} USESRAM={usesram} MAXCORES=1"
|
||||
pool.map(mask, [command])
|
||||
|
||||
def mask(command):
|
||||
|
|
|
@ -1,9 +1,17 @@
|
|||
# Makefile for testbench to create .memfile, .objdump.addr, and .objdump.lab from an ELF
|
||||
# David_Harris@hmc.edu 3 July 2024
|
||||
# james.stine@okstate.edu 24 Jan 2025
|
||||
# SPDX-License-Identifier: Apache-2.0 WITH SHL-2.1
|
||||
|
||||
# the width is set by the elf's type to allow for individual elf compilation
|
||||
%.memfile: %
|
||||
riscv64-unknown-elf-elf2hex --bit-width $(if $(findstring rv32,$*),32,64) --input $< --output $@
|
||||
@if grep -q 'elf32' $*.objdump; then \
|
||||
BIT_WIDTH=32; \
|
||||
else \
|
||||
BIT_WIDTH=64; \
|
||||
fi; \
|
||||
echo "Processing $< with --bit-width $$BIT_WIDTH"; \
|
||||
riscv64-unknown-elf-elf2hex --bit-width $$BIT_WIDTH --input $< --output $@
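The same selection, sketched in Python for clarity (the objdump file name is hypothetical): the objdump header of an RV32 ELF contains "elf32", so its presence selects a 32-bit memfile and 64 bits otherwise.

# Hypothetical Python equivalent of the bit-width selection in the rule above.
from pathlib import Path

def bit_width(objdump_path):
    return 32 if "elf32" in Path(objdump_path).read_text() else 64

# elf2hex would then be called with --bit-width bit_width("program.objdump")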
|
||||
|
||||
%.objdump.addr: %.objdump
|
||||
extractFunctionRadix.sh $<
|
||||
|
|
|
@ -1,26 +0,0 @@
|
|||
///////////////////////////////////////////
|
||||
// checksignature.sv
|
||||
//
|
||||
// Written: David Harris David_Harris@hmc.edu
|
||||
// Modified: 14 June 2023
|
||||
//
|
||||
// Purpose: Verifies the memory signature.
|
||||
//
|
||||
// A component of the Wally configurable RISC-V project.
|
||||
//
|
||||
// Copyright (C) 2021 Harvey Mudd College & Oklahoma State University
|
||||
//
|
||||
// SPDX-License-Identifier: Apache-2.0 WITH SHL-2.1
|
||||
//
|
||||
// Licensed under the Solderpad Hardware License v 2.1 (the “License”); you may not use this file
|
||||
// except in compliance with the License, or, at your option, the Apache License version 2.0. You
|
||||
// may obtain a copy of the License at
|
||||
//
|
||||
// https://solderpad.org/licenses/SHL-2.1/
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, any work distributed under the
|
||||
// License is distributed on an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
|
||||
// either express or implied. See the License for the specific language governing permissions
|
||||
// and limitations under the License.
|
||||
////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
|
@ -23,7 +23,7 @@
|
|||
// and limitations under the License.
|
||||
////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
module FunctionName import cvw::*; #(parameter cvw_t P) (
|
||||
module functionName import cvw::*; #(parameter cvw_t P) (
|
||||
input logic reset,
|
||||
input logic clk,
|
||||
input string ProgramAddrMapFile,
|
||||
|
|
|
@ -84,11 +84,11 @@ module loggers import cvw::*; #(parameter cvw_t P,
|
|||
|
||||
always_comb
|
||||
if (TEST == "embench") begin
|
||||
StartSampleFirst = FunctionName.FunctionName.FunctionName == "start_trigger";
|
||||
EndSampleFirst = FunctionName.FunctionName.FunctionName == "stop_trigger";
|
||||
StartSampleFirst = functionName.functionName.FunctionName == "start_trigger";
|
||||
EndSampleFirst = functionName.functionName.FunctionName == "stop_trigger";
|
||||
end else if (TEST == "coremark") begin
|
||||
StartSampleFirst = FunctionName.FunctionName.FunctionName == "start_time";
|
||||
EndSampleFirst = FunctionName.FunctionName.FunctionName == "stop_time";
|
||||
StartSampleFirst = functionName.functionName.FunctionName == "start_time";
|
||||
EndSampleFirst = functionName.functionName.FunctionName == "stop_time";
|
||||
end else begin
|
||||
StartSampleFirst = reset;
|
||||
EndSampleFirst = '0;
|
||||
|
@ -106,22 +106,22 @@ module loggers import cvw::*; #(parameter cvw_t P,
|
|||
if(TEST == "embench") begin
|
||||
// embench runs warmup then runs start_trigger
|
||||
// embench ends with stop_trigger.
|
||||
//assign StartSampleFirst = FunctionName.FunctionName.FunctionName == "start_trigger";
|
||||
//assign StartSampleFirst = functionName.functionName.FunctionName == "start_trigger";
|
||||
//flopr #(1) StartSampleReg(clk, reset, StartSampleFirst, StartSampleDelayed);
|
||||
//assign StartSample = StartSampleFirst & ~ StartSampleDelayed;
|
||||
|
||||
//assign EndSampleFirst = FunctionName.FunctionName.FunctionName == "stop_trigger";
|
||||
//assign EndSampleFirst = functionName.functionName.FunctionName == "stop_trigger";
|
||||
flopr #(1) EndSampleReg(clk, reset, EndSampleFirst, EndSampleDelayed);
|
||||
assign EndSample = EndSampleFirst & ~ EndSampleDelayed;
|
||||
|
||||
end else if(TEST == "coremark") begin
|
||||
// coremark runs warmup then runs start_time
// coremark ends with stop_time.
|
||||
//assign StartSampleFirst = FunctionName.FunctionName.FunctionName == "start_time";
|
||||
//assign StartSampleFirst = functionName.functionName.FunctionName == "start_time";
|
||||
//flopr #(1) StartSampleReg(clk, reset, StartSampleFirst, StartSampleDelayed);
|
||||
//assign StartSample = StartSampleFirst & ~ StartSampleDelayed;
|
||||
|
||||
//assign EndSampleFirst = FunctionName.FunctionName.FunctionName == "stop_time";
|
||||
//assign EndSampleFirst = functionName.functionName.FunctionName == "stop_time";
|
||||
flopr #(1) EndSampleReg(clk, reset, EndSampleFirst, EndSampleDelayed);
|
||||
assign EndSample = EndSampleFirst & ~ EndSampleDelayed;
|
||||
|
||||
|
|
|
@ -2,6 +2,8 @@
|
|||
// wallyTracer.sv
|
||||
//
|
||||
// A component of the Wally configurable RISC-V project.
|
||||
// Implements a RISC-V Verification Interface (RVVI)
|
||||
// to support functional coverage and lockstep simulation.
|
||||
//
|
||||
// Copyright (C) 2021 Harvey Mudd College & Oklahoma State University
|
||||
//
|
||||
|
@ -20,19 +22,16 @@
|
|||
////////////////////////////////////////////////////////////////////////////////////////////////
`define NUM_REGS 32
|
||||
`define NUM_CSRS 4096
|
||||
|
||||
`define STD_LOG 0
|
||||
`define PRINT_PC_INSTR 0
|
||||
`define PRINT_MOST 0
|
||||
`define PRINT_ALL 0
|
||||
`define PRINT_CSRS 0
|
||||
|
||||
|
||||
module wallyTracer import cvw::*; #(parameter cvw_t P) (rvviTrace rvvi);
|
||||
|
||||
localparam NUMREGS = P.E_SUPPORTED ? 16 : 32;
|
||||
localparam NUM_REGS = P.E_SUPPORTED ? 16 : 32;
|
||||
localparam NUM_CSRS = 4096;
|
||||
|
||||
// wally specific signals
|
||||
logic reset;
|
||||
|
@ -50,17 +49,17 @@ module wallyTracer import cvw::*; #(parameter cvw_t P) (rvviTrace rvvi);
|
|||
logic TrapM, TrapW;
|
||||
logic HaltM, HaltW;
|
||||
logic [1:0] PrivilegeModeW;
|
||||
logic [P.XLEN-1:0] rf[NUMREGS];
|
||||
logic [NUMREGS-1:0] rf_wb;
|
||||
logic [P.XLEN-1:0] rf[NUM_REGS];
|
||||
logic [NUM_REGS-1:0] rf_wb;
|
||||
logic [4:0] rf_a3;
|
||||
logic rf_we3;
|
||||
logic [P.FLEN-1:0] frf[32];
|
||||
logic [`NUM_REGS-1:0] frf_wb;
|
||||
logic [31:0] frf_wb;
|
||||
logic [4:0] frf_a4;
|
||||
logic frf_we4;
|
||||
logic [P.XLEN-1:0] CSRArray [4095:0];
|
||||
logic [P.XLEN-1:0] CSRArrayOld [4095:0];
|
||||
logic [`NUM_CSRS-1:0] CSR_W;
|
||||
logic [NUM_CSRS-1:0] CSR_W;
|
||||
logic CSRWriteM, CSRWriteW;
|
||||
logic [11:0] CSRAdrM, CSRAdrW;
|
||||
logic wfiM;
|
||||
|
@ -314,8 +313,8 @@ module wallyTracer import cvw::*; #(parameter cvw_t P) (rvviTrace rvvi);
|
|||
|
||||
genvar index;
|
||||
assign rf[0] = 0;
|
||||
for(index = 1; index < NUMREGS; index += 1)
|
||||
assign rf[index] = testbench.dut.core.ieu.dp.regf.rf[index];
|
||||
for(index = 1; index < NUM_REGS; index += 1)
|
||||
assign rf[index] = testbench.dut.core.ieu.dp.regf.rf[index];
|
||||
|
||||
assign rf_a3 = testbench.dut.core.ieu.dp.regf.a3;
|
||||
assign rf_we3 = testbench.dut.core.ieu.dp.regf.we3;
|
||||
|
@ -329,12 +328,12 @@ module wallyTracer import cvw::*; #(parameter cvw_t P) (rvviTrace rvvi);
|
|||
if (P.F_SUPPORTED) begin
|
||||
assign frf_a4 = testbench.dut.core.fpu.fpu.fregfile.a4;
|
||||
assign frf_we4 = testbench.dut.core.fpu.fpu.fregfile.we4;
|
||||
for(index = 0; index < NUMREGS; index += 1)
|
||||
for(index = 0; index < 32; index += 1)
|
||||
assign frf[index] = testbench.dut.core.fpu.fpu.fregfile.rf[index];
|
||||
end else begin
|
||||
assign frf_a4 = '0;
|
||||
assign frf_we4 = 0;
|
||||
for(index = 0; index < NUMREGS; index += 1)
|
||||
for(index = 0; index < 32; index += 1)
|
||||
assign frf[index] = '0;
|
||||
end
|
||||
|
||||
|
@ -364,25 +363,25 @@ module wallyTracer import cvw::*; #(parameter cvw_t P) (rvviTrace rvvi);
|
|||
//for VM Verification
|
||||
flopenrc #(P.XLEN) IVAdrDReg (clk, reset, 1'b0, SelHPTW, IVAdrF, IVAdrD); //Virtual Address for IMMU // *** RT: possible bug SelHPTW probably should be ~StallD
|
||||
flopenrc #(P.XLEN) IVAdrEReg (clk, reset, 1'b0, ~StallE, IVAdrD, IVAdrE); //Virtual Address for IMMU
|
||||
flopenrc #(P.XLEN) IVAdrMReg (clk, reset, 1'b0, ~StallM, IVAdrE, IVAdrM); //Virtual Address for IMMU
|
||||
flopenrc #(P.XLEN) IVAdrMReg (clk, reset, 1'b0, ~(StallM & ~SelHPTW), IVAdrE, IVAdrM); //Virtual Address for IMMU
|
||||
flopenrc #(P.XLEN) IVAdrWReg (clk, reset, 1'b0, SelHPTW, IVAdrM, IVAdrW); //Virtual Address for IMMU // *** RT: possible bug SelHPTW probably should be ~GatedStallW
|
||||
flopenrc #(P.XLEN) DVAdrWReg (clk, reset, 1'b0, SelHPTW, DVAdrM, DVAdrW); //Virtual Address for DMMU // *** RT: possible bug SelHPTW probably should be ~GatedStallW
|
||||
|
||||
flopenrc #(P.PA_BITS) IPADReg (clk, reset, 1'b0, SelHPTW, IPAF, IPAD); //Physical Address for IMMU // *** RT: possible bug SelHPTW probably should be ~StallD
|
||||
flopenrc #(P.PA_BITS) IPAEReg (clk, reset, 1'b0, ~StallE, IPAD, IPAE); //Physical Address for IMMU
|
||||
flopenrc #(P.PA_BITS) IPAMReg (clk, reset, 1'b0, ~StallM, IPAE, IPAM); //Physical Address for IMMU
|
||||
flopenrc #(P.PA_BITS) IPAMReg (clk, reset, 1'b0, ~(StallM & ~SelHPTW), IPAE, IPAM); //Physical Address for IMMU
|
||||
flopenrc #(P.PA_BITS) IPAWReg (clk, reset, 1'b0, SelHPTW, IPAM, IPAW); //Physical Address for IMMU // *** RT: possible bug SelHPTW probably should be ~GatedStallW
|
||||
flopenrc #(P.PA_BITS) DPAWReg (clk, reset, 1'b0, SelHPTW, DPAM, DPAW); //Physical Address for DMMU // *** RT: possible bug SelHPTW probably should be ~GatedStallW
|
||||
|
||||
flopenrc #(P.XLEN) IPTEDReg (clk, reset, 1'b0, SelHPTW, IPTEF, IPTED); //PTE for IMMU // *** RT: possible bug SelHPTW probably should be ~StallD
|
||||
flopenrc #(P.XLEN) IPTEEReg (clk, reset, 1'b0, ~StallE, IPTED, IPTEE); //PTE for IMMU
|
||||
flopenrc #(P.XLEN) IPTEMReg (clk, reset, 1'b0, ~StallM, IPTEE, IPTEM); //PTE for IMMU
|
||||
flopenrc #(P.XLEN) IPTEMReg (clk, reset, 1'b0, ~(StallM & ~SelHPTW), IPTEE, IPTEM); //PTE for IMMU
|
||||
flopenrc #(P.XLEN) IPTEWReg (clk, reset, 1'b0, SelHPTW, IPTEM, IPTEW); //PTE for IMMU // *** RT: possible bug SelHPTW probably should be ~GatedStallW
|
||||
flopenrc #(P.XLEN) DPTEWReg (clk, reset, 1'b0, SelHPTW, DPTEM, DPTEW); //PTE for DMMU // *** RT: possible bug SelHPTW probably should be ~GatedStallW
|
||||
|
||||
flopenrc #(2) IPageTypeDReg (clk, reset, 1'b0, SelHPTW, IPageTypeF, IPageTypeD); //PageType (kilo, mega, giga, tera) from IMMU // *** RT: possible bug SelHPTW probably should be ~StallD
|
||||
flopenrc #(2) IPageTypeEReg (clk, reset, 1'b0, ~StallE, IPageTypeD, IPageTypeE); //PageType (kilo, mega, giga, tera) from IMMU
|
||||
flopenrc #(2) IPageTypeMReg (clk, reset, 1'b0, ~StallM, IPageTypeE, IPageTypeM); //PageType (kilo, mega, giga, tera) from IMMU
|
||||
flopenrc #(2) IPageTypeMReg (clk, reset, 1'b0, ~(StallM & ~SelHPTW), IPageTypeE, IPageTypeM); //PageType (kilo, mega, giga, tera) from IMMU
|
||||
flopenrc #(2) IPageTypeWReg (clk, reset, 1'b0, SelHPTW, IPageTypeM, IPageTypeW); //PageType (kilo, mega, giga, tera) from IMMU // *** RT: possible bug SelHPTW probably should be ~GatedStallW
|
||||
flopenrc #(2) DPageTypeWReg (clk, reset, 1'b0, SelHPTW, DPageTypeM, DPageTypeW); //PageType (kilo, mega, giga, tera) from DMMU // *** RT: possible bug SelHPTW probably should be ~GatedStallW
|
||||
|
||||
|
@ -420,9 +419,11 @@ module wallyTracer import cvw::*; #(parameter cvw_t P) (rvviTrace rvvi);
|
|||
~FlushE ? PCD :
|
||||
~FlushD ? PCF : PCNextF;
|
||||
|
||||
for(index = 0; index < `NUM_REGS; index += 1) begin
|
||||
for(index = 0; index < NUM_REGS; index += 1) begin
|
||||
assign rvvi.x_wdata[0][0][index] = rf[index];
|
||||
assign rvvi.x_wb[0][0][index] = rf_wb[index];
|
||||
end
|
||||
for(index = 0; index < 32; index += 1) begin
|
||||
assign rvvi.f_wdata[0][0][index] = frf[index];
|
||||
assign rvvi.f_wb[0][0][index] = frf_wb[index];
|
||||
end
|
||||
|
@ -744,18 +745,18 @@ module wallyTracer import cvw::*; #(parameter cvw_t P) (rvviTrace rvvi);
|
|||
if(valid) begin
|
||||
if(`STD_LOG) begin
|
||||
$fwrite(file, "%016x, %08x, %s\t\t", rvvi.pc_rdata[0][0], rvvi.insn[0][0], instrWName);
|
||||
for(index2 = 0; index2 < `NUM_REGS; index2 += 1) begin
|
||||
for(index2 = 0; index2 < NUM_REGS; index2 += 1) begin
|
||||
if(rvvi.x_wb[0][0][index2]) begin
|
||||
$fwrite(file, "rf[%02d] = %016x ", index2, rvvi.x_wdata[0][0][index2]);
|
||||
end
|
||||
end
|
||||
end
|
||||
for(index2 = 0; index2 < `NUM_REGS; index2 += 1) begin
|
||||
for(index2 = 0; index2 < 32; index2 += 1) begin
|
||||
if(rvvi.f_wb[0][0][index2]) begin
|
||||
$fwrite(file, "frf[%02d] = %016x ", index2, rvvi.f_wdata[0][0][index2]);
|
||||
end
|
||||
end
|
||||
for(index2 = 0; index2 < `NUM_CSRS; index2 += 1) begin
|
||||
for(index2 = 0; index2 < NUM_CSRS; index2 += 1) begin
|
||||
if(rvvi.csr_wb[0][0][index2]) begin
|
||||
$fwrite(file, "csr[%03x] = %016x ", index2, rvvi.csr[0][0][index2]);
|
||||
end
|
||||
|
@ -769,15 +770,15 @@ module wallyTracer import cvw::*; #(parameter cvw_t P) (rvviTrace rvvi);
|
|||
else if(`PRINT_ALL) begin
|
||||
$display("order = %08d, PC = %08x, insn = %08x, trap = %1d, halt = %1d, intr = %1d, mode = %1x, ixl = %1x, pc_wdata = %08x",
|
||||
rvvi.order[0][0], rvvi.pc_rdata[0][0], rvvi.insn[0][0], rvvi.trap[0][0], rvvi.halt[0][0], rvvi.intr[0][0], rvvi.mode[0][0], rvvi.ixl[0][0], rvvi.pc_wdata[0][0]);
|
||||
for(index2 = 0; index2 < `NUM_REGS; index2 += 1) begin
|
||||
for(index2 = 0; index2 < NUM_REGS; index2 += 1) begin
|
||||
$display("x%02d = %08x", index2, rvvi.x_wdata[0][0][index2]);
|
||||
end
|
||||
for(index2 = 0; index2 < `NUM_REGS; index2 += 1) begin
|
||||
for(index2 = 0; index2 < 32; index2 += 1) begin
|
||||
$display("f%02d = %08x", index2, rvvi.f_wdata[0][0][index2]);
|
||||
end
|
||||
end
|
||||
if (`PRINT_CSRS) begin
|
||||
for(index2 = 0; index2 < `NUM_CSRS; index2 += 1) begin
|
||||
for(index2 = 0; index2 < NUM_CSRS; index2 += 1) begin
|
||||
if(CSR_W[index2]) begin
|
||||
$display("%t: CSR %03x = %x", $time(), index2, CSRArray[index2]);
|
||||
end
|
||||
|
|
|
@ -1,25 +0,0 @@
|
|||
typedef RISCV_instruction #(ILEN, XLEN, FLEN, VLEN, NHART, RETIRE) test_ins_rv64i_t;
|
||||
|
||||
covergroup test_fencei_cg with function sample(test_ins_rv64i_t ins);
|
||||
option.per_instance = 1;
|
||||
option.comment = "Fence.I";
|
||||
|
||||
cp_asm_count : coverpoint ins.ins_str == "fence.i" iff (ins.trap == 0 ) {
|
||||
option.comment = "Number of times instruction is executed";
|
||||
bins count[] = {1};
|
||||
}
|
||||
endgroup
|
||||
|
||||
function void test_fencei_sample(int hart, int issue);
|
||||
test_ins_rv64i_t ins;
|
||||
|
||||
case (traceDataQ[hart][issue][0].inst_name)
|
||||
"fenci" : begin
|
||||
ins = new(hart, issue, traceDataQ);
|
||||
test_fencei_cg.sample(ins);
|
||||
end
|
||||
endcase
|
||||
|
||||
endfunction
|
||||
|
||||
|
|
@ -1,4 +0,0 @@
|
|||
test_fencei_cg = new(); test_fencei_cg.set_inst_name("obj_fencei");
|
||||
|
||||
// test_fencei_cg = new();
|
||||
//test_fencei_cg.set_inst_name("obj_fencei");
|
|
@ -1,108 +0,0 @@
|
|||
module test_pmp_coverage import cvw::*; #(parameter cvw_t P) (input clk);
|
||||
|
||||
// Ensure the covergroup is defined correctly
|
||||
covergroup cg_priv_mode @(posedge clk);
|
||||
coverpoint dut.core.ifu.PrivilegeModeW {
|
||||
bins user = {2'b00};
|
||||
bins superv = {2'b01};
|
||||
bins hyperv = {2'b10};
|
||||
bins mach = {2'b11};
|
||||
}
|
||||
endgroup
|
||||
|
||||
covergroup cg_PMPConfig @(posedge clk);
|
||||
coverpoint dut.core.ifu.PMPCFG_ARRAY_REGW[0][0] {
|
||||
bins ones = {1};
|
||||
bins zeros = {0};
|
||||
}
|
||||
endgroup
|
||||
|
||||
|
||||
function bit [1:0] getPMPConfigSlice(int index);
|
||||
return dut.core.ifu.immu.immu.PMPCFG_ARRAY_REGW[index][4:3];
|
||||
endfunction
|
||||
|
||||
//if (P.PMP_ENTRIES > 0) begin : pmp
|
||||
covergroup cg_pmpcfg_mode @(posedge clk);
|
||||
coverpoint dut.core.ifu.immu.immu.PMPCFG_ARRAY_REGW[0][4:3] {
|
||||
bins off = {2'b00};
|
||||
bins tor = {2'b01};
|
||||
bins na4 = {2'b10};
|
||||
bins napot = {2'b11};
|
||||
}
|
||||
|
||||
|
||||
coverpoint dut.core.ifu.immu.immu.PMPCFG_ARRAY_REGW[1][4:3] {
|
||||
bins off = {2'b00};
|
||||
bins tor = {2'b01};
|
||||
bins na4 = {2'b10};
|
||||
bins napot = {2'b11};
|
||||
}
|
||||
|
||||
coverpoint dut.core.ifu.immu.immu.PMPCFG_ARRAY_REGW[2][4:3] {
|
||||
bins off = {2'b00};
|
||||
bins tor = {2'b01};
|
||||
bins na4 = {2'b10};
|
||||
bins napot = {2'b11};
|
||||
}
|
||||
|
||||
coverpoint dut.core.ifu.immu.immu.PMPCFG_ARRAY_REGW[3][4:3] {
|
||||
bins off = {2'b00};
|
||||
bins tor = {2'b01};
|
||||
bins na4 = {2'b10};
|
||||
bins napot = {2'b11};
|
||||
}
|
||||
|
||||
coverpoint dut.core.ifu.immu.immu.PMPCFG_ARRAY_REGW[4][4:3] {
|
||||
bins off = {2'b00};
|
||||
bins tor = {2'b01};
|
||||
bins na4 = {2'b10};
|
||||
bins napot = {2'b11};
|
||||
}
|
||||
|
||||
coverpoint dut.core.ifu.immu.immu.PMPCFG_ARRAY_REGW[5][4:3] {
|
||||
bins off = {2'b00};
|
||||
bins tor = {2'b01};
|
||||
bins na4 = {2'b10};
|
||||
bins napot = {2'b11};
|
||||
}
|
||||
|
||||
coverpoint dut.core.ifu.immu.immu.PMPCFG_ARRAY_REGW[6][4:3] {
|
||||
bins off = {2'b00};
|
||||
bins tor = {2'b01};
|
||||
bins na4 = {2'b10};
|
||||
bins napot = {2'b11};
|
||||
}
|
||||
|
||||
coverpoint dut.core.ifu.immu.immu.PMPCFG_ARRAY_REGW[7][4:3] {
|
||||
bins off = {2'b00};
|
||||
bins tor = {2'b01};
|
||||
bins na4 = {2'b10};
|
||||
bins napot = {2'b11};
|
||||
}
|
||||
endgroup
|
||||
//end
|
||||
|
||||
|
||||
// Ensure that the instantiation and sampling of covergroups are within the correct procedural context
|
||||
initial begin
|
||||
cg_priv_mode privmodeCG = new(); // Instantiate the privilege mode covergroup
|
||||
cg_PMPConfig pmpconfigCG = new(); // Instantiate the PMP config covergroup
|
||||
cg_pmpcfg_mode pmpcfgmodeCG = new();
|
||||
|
||||
forever begin
|
||||
@(posedge clk) begin
|
||||
privmodeCG.sample(); // Sample the privilege mode covergroup
|
||||
pmpconfigCG.sample(); // Sample the PMP config covergroup
|
||||
pmpcfgmodeCG.sample();
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
endmodule
@ -1,12 +1,14 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import sys, fileinput
|
||||
import sys
|
||||
import fileinput
|
||||
|
||||
address = 0
|
||||
|
||||
|
||||
for line in fileinput.input('-'):
|
||||
# the 14- is to reverse the byte order to little endian
|
||||
formatedLine = ' '.join(line[14-i:14-i+2] for i in range(0, len(line), 2))
|
||||
sys.stdout.write('@{:08x} {:s}\n'.format(address, formatedLine))
|
||||
address+=8
|
||||
with fileinput.input('-') as f:
|
||||
for line in f:
|
||||
# the 14- is to reverse the byte order to little endian
|
||||
formatedLine = ' '.join(line[14-i:14-i+2] for i in range(0, len(line), 2))
|
||||
sys.stdout.write(f'@{address:08x} {formatedLine:s}\n')
|
||||
address+=8
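A concrete illustration of the byte reversal above (the input word is made up): the slice line[14-i:14-i+2] walks an 8-byte word from its most significant byte down to its least significant, producing little-endian order.

# Made-up example of the reversal for one 16-hex-digit word (no trailing newline here):
line = "0123456789abcdef"
formatted = ' '.join(line[14 - i:14 - i + 2] for i in range(0, len(line), 2))
print(formatted)  # ef cd ab 89 67 45 23 01  -> least significant byte first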
|
||||
|
|
|
@ -684,8 +684,8 @@ module testbench;
|
|||
loggers (clk, reset, DCacheFlushStart, DCacheFlushDone, memfilename, TEST);
|
||||
|
||||
// track the current function or global label
|
||||
if (DEBUG > 0 | ((PrintHPMCounters | BPRED_LOGGER) & P.ZICNTR_SUPPORTED)) begin : FunctionName
|
||||
FunctionName #(P) FunctionName(.reset(reset_ext | TestBenchReset),
|
||||
if (DEBUG > 0 | ((PrintHPMCounters | BPRED_LOGGER) & P.ZICNTR_SUPPORTED)) begin : functionName
|
||||
functionName #(P) functionName(.reset(reset_ext | TestBenchReset),
|
||||
.clk(clk), .ProgramAddrMapFile(ProgramAddrMapFile), .ProgramLabelMapFile(ProgramLabelMapFile));
|
||||
end
|
||||
|
||||
|
@ -710,11 +710,11 @@ module testbench;

  always @(posedge clk) begin
    // if (reset) PrevPCZero <= 0;
    // else if (dut.core.InstrValidM) PrevPCZero <= (FunctionName.PCM == 0 & dut.core.ifu.InstrM == 0);
    // else if (dut.core.InstrValidM) PrevPCZero <= (functionName.PCM == 0 & dut.core.ifu.InstrM == 0);
    TestComplete <= ((InstrM == 32'h6f) & dut.core.InstrValidM ) |
                    ((dut.core.lsu.IEUAdrM == ProgramAddrLabelArray["tohost"] & dut.core.lsu.IEUAdrM != 0) & InstrMName == "SW"); // |
    // (FunctionName.PCM == 0 & dut.core.ifu.InstrM == 0 & dut.core.InstrValidM & PrevPCZero));
    // if (FunctionName.PCM == 0 & dut.core.ifu.InstrM == 0 & dut.core.InstrValidM & PrevPCZero)
    // (functionName.PCM == 0 & dut.core.ifu.InstrM == 0 & dut.core.InstrValidM & PrevPCZero));
    // if (functionName.PCM == 0 & dut.core.ifu.InstrM == 0 & dut.core.InstrValidM & PrevPCZero)
    //   $error("Program fetched illegal instruction 0x00000000 from address 0x00000000 twice in a row. Usually due to fault with no fault handler.");
  end

@ -751,6 +751,8 @@ end
    .CMP_CSR (1)
  ) idv_trace2api(rvvi);

  // `include "RV_Assertions.sv"

  string filename;
  initial begin
    // imperasDV requires the elf file to be defined at the beginning of the simulation.
@ -154,7 +154,11 @@ trek:
    doc: >-
      Verbatim code that will be put into the header section of the test.
    value: |-

      // enable mtimer interrupts
      #define TREK_MTIME_BASE (0x0200bff8)
      #define TREK_MTIMECMP_BASE (0x02004000)
      #define TREK_MSWI_BASE (0x02000000)
      #define TREK_MTIMECMP_DELAY 0x1000
  declaration:
    doc: >-
      Verbatim code that will be put into the declaration section of the test.
@ -14,7 +14,7 @@ TREKSVIP := source $(TREKFILES)/breker-setup.sh && treksvip -p $(PLATFOR
# Compilation paths and variables
START_LIB_DIR := $(WALLY)/examples/C/common
START_LIB := $(START_LIB_DIR)/crt.S $(START_LIB_DIR)/syscalls.c
MARCH :=-march=rv64gc_zcb_zfa_zba_zbb_zbc_zbs_zfh_zicboz_zicbop_zicbom_zbkb_zbkx_zknd_zkne_zknh_svinval
MARCH :=-march=rv64gc_zcb_zfa_zba_zbb_zbc_zbs_zfh_zicboz_zicbop_zicbom_zicond_zbkb_zbkx_zknd_zkne_zknh_svinval
MABI :=-mabi=lp64d
LINKER := $(START_LIB_DIR)/test.ld
LINK_FLAGS := -nostartfiles
@ -40,9 +40,9 @@ mem_addr = mem_start_addr

def wl(line="", comment=None, fname=test_name):
    with open(fname, "a") as f:
        instr = False if (":" in line or
                          ".align" in line or
                          "# include" in line) else True
        instr = not (":" in line or
                     ".align" in line or
                     "# include" in line)
        indent = 6 if instr else 0
        comment = "// " + comment if comment is not None else ""
        to_write = " " * indent + line + comment + "\n"
@ -78,7 +78,7 @@ if __name__ == "__main__":
    for i in range(dcache_num_ways):
        wl(comment=f"start way test #{i+1}")
        wl(f'li t0, {hex(mem_addr)}')
        wl(f'.align 6') # start at i$ set boundary. 6 lsb bits are zero.
        wl('.align 6') # start at i$ set boundary. 6 lsb bits are zero.
        wl(comment=f"i$ boundary, way test #{i+1}")
        write_repro_instrs()
        mem_addr += dcache_way_size_in_bytes # so that we exercise a new D$ way.
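A small worked sketch of the address stepping above, with hypothetical cache parameters chosen only for illustration (the real values come from the generator's configuration):

```python
# Hypothetical parameters for illustration only
dcache_way_size_in_bytes = 4096   # assumed size of one D$ way
dcache_num_ways = 4               # assumed associativity
mem_addr = 0x80000000             # assumed test base address

# Stepping by one way size keeps the set-index bits of the address the same while
# changing the tag, so each iteration touches a different way of the same sets.
for i in range(dcache_num_ways):
    print(f"way test #{i+1}: li t0, {hex(mem_addr)}")
    mem_addr += dcache_way_size_in_bytes
```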
@ -60,9 +60,9 @@ class Config:
def create_vectors(my_config):
    suite_folder_num = my_config.bits
    if my_config.bits == 64 and my_config.letter == "F": suite_folder_num = 32
    source_dir1 = "{}/addins/riscv-arch-test/riscv-test-suite/rv{}i_m/{}/src/".format(wally, suite_folder_num, my_config.letter)
    source_dir2 = "{}/tests/riscof/work/riscv-arch-test/rv{}i_m/{}/src/".format(wally, my_config.bits, my_config.letter)
    dest_dir = "{}/tests/fp/combined_IF_vectors/IF_vectors/".format(wally)
    source_dir1 = f"{wally}/addins/riscv-arch-test/riscv-test-suite/rv{suite_folder_num}i_m/{my_config.letter}/src/"
    source_dir2 = f"{wally}/tests/riscof/work/riscv-arch-test/rv{my_config.bits}i_m/{my_config.letter}/src/"
    dest_dir = f"{wally}/tests/fp/combined_IF_vectors/IF_vectors/"
    all_vectors1 = os.listdir(source_dir1)

    filt_vectors1 = [v for v in all_vectors1 if my_config.filt in v]
@ -76,218 +76,212 @@ def create_vectors(my_config):
operation = my_config.op_code
rounding_mode = "X"
flags = "XX"
# use name to create our new tv
dest_file = open("{}cvw_{}_{}.tv".format(dest_dir, my_config.bits, vector1[:-2]), 'w')
# open vectors
src_file1 = open(source_dir1 + vector1,'r')
src_file2 = open(source_dir2 + vector2,'r')
# for each test in the vector
reading = True
src_file2.readline() #skip first bc junk
# print(my_config.bits, my_config.letter)
if my_config.letter == "F" and my_config.bits == 64:
# use name to create our new tv and open vectors
with open(f"{dest_dir}cvw_{my_config.bits}_{vector1[:-2]}.tv", 'w') as dest_file, open(source_dir1 + vector1) as src_file1, open(source_dir2 + vector2) as src_file2:
# for each test in the vector
reading = True
# print("trigger 64F")
#skip first 2 lines bc junk
src_file2.readline()
while reading:
# get answer and flags from Ref...signature
# answers are before deadbeef (first line of 4)
# flags are after deadbeef (third line of 4)
answer = src_file2.readline().strip()
deadbeef = src_file2.readline().strip()
# print(answer)
if not (answer == "e7d4b281" and deadbeef == "6f5ca309"): # if there is still stuff to read
# get flags
packed = src_file2.readline().strip()[6:]
flags, rounding_mode = unpack_rf(packed)
# skip 00000000 buffer
src_file2.readline()

# parse through .S file
detected = False
done = False
op1val = "0"
op2val = "0"
while not (detected or done):
# print("det1")
line = src_file1.readline()
# print(line)
if "op1val" in line:
# print("det2")
# parse line

# handle special case where destination register is hardwired to zero
if "dest:x0" in line:
answer = "x" * len(answer)

op1val = line.split("op1val")[1].split("x")[1].split(";")[0]
if my_config.op != "fsqrt": # sqrt doesn't have two input vals
op2val = line.split("op2val")[1].split("x")[1].strip()
if op2val[-1] == ";": op2val = op2val[:-1] # remove ; if it's there
else:
op2val = 32*"X"
# go to next test in vector
detected = True
elif "RVTEST_CODE_END" in line:
done = True
# put it all together
if not done:
translation = "{}_{}_{}_{}_{}_{}".format(operation, ext_bits(op1val), ext_bits(op2val), ext_bits(answer.strip()), flags, rounding_mode)
dest_file.write(translation + "\n")
else:
# print("read false")
reading = False
elif my_config.letter == "M" and my_config.bits == 64:
reading = True
#skip first 2 lines bc junk
src_file2.readline()
while reading:
# print("trigger 64M")
# get answer from Ref...signature
# answers span two lines and are reversed
answer2 = src_file2.readline().strip()
answer1 = src_file2.readline().strip()
answer = answer1 + answer2
#print(answer1,answer2)
if not (answer2 == "e7d4b281" and answer1 == "6f5ca309"): # if there is still stuff to read
# parse through .S file
detected = False
done = False
op1val = "0"
op2val = "0"
while not (detected or done):
# print("det1")
line = src_file1.readline()
# print(line)
if "op1val" in line:
# print("det2")
# parse line
# handle special case where destination register is hardwired to zero
if "dest:x0" in line:
answer = "x" * len(answer)
op1val = line.split("op1val")[1].split("x")[1].split(";")[0]
if "-" in line.split("op1val")[1].split("x")[0]: # neg sign handling
op1val = twos_comp(my_config.bits, op1val)
if my_config.op != "fsqrt": # sqrt doesn't have two input vals, unnec here but keeping for later
op2val = line.split("op2val")[1].split("x")[1].strip()
if op2val[-1] == ";": op2val = op2val[:-1] # remove ; if it's there
if "-" in line.split("op2val")[1].split("x")[0]: # neg sign handling
op2val = twos_comp(my_config.bits, op2val)
# go to next test in vector
detected = True
elif "RVTEST_CODE_END" in line:
done = True
# ints don't have flags
flags = "XX"
# put it all together
if not done:
translation = "{}_{}_{}_{}_{}_{}".format(operation, ext_bits(op1val), ext_bits(op2val), ext_bits(answer.strip()), flags.strip(), rounding_mode)
dest_file.write(translation + "\n")
else:
# print("read false")
reading = False
elif my_config.letter == "M" and my_config.bits == 32:
reading = True
while reading:
# print("trigger 64M")
# get answer from Ref...signature
# answers span two lines and are reversed
answer = src_file2.readline().strip()
# print(f"Answer: {answer}")
#print(answer1,answer2)
if not (answer == "6f5ca309"): # if there is still stuff to read
# parse through .S file
detected = False
done = False
op1val = "0"
op2val = "0"
while not (detected or done):
# print("det1")
line = src_file1.readline()
# print(line)
if "op1val" in line:
# print("det2")
# parse line
# handle special case where destination register is hardwired to zero
if "dest:x0" in line:
answer = "x" * len(answer)
op1val = line.split("op1val")[1].split("x")[1].split(";")[0]
if "-" in line.split("op1val")[1].split("x")[0]: # neg sign handling
op1val = line.split("op1val")[1].split("x")[1].split(";")[0]
if "-" in line.split("op1val")[1].split("x")[0]: # neg sign handling
op1val = twos_comp(my_config.bits, op1val)
if my_config.op != "fsqrt": # sqrt doesn't have two input vals, unnec here but keeping for later
op2val = line.split("op2val")[1].split("x")[1].strip()
if op2val[-1] == ";": op2val = op2val[:-1] # remove ; if it's there
if "-" in line.split("op2val")[1].split("x")[0]: # neg sign handling
op2val = twos_comp(my_config.bits, op2val)
# go to next test in vector
detected = True
elif "RVTEST_CODE_END" in line:
done = True
# ints don't have flags
flags = "XX"
# put it all together
if not done:
translation = "{}_{}_{}_{}_{}_{}".format(operation, ext_bits(op1val), ext_bits(op2val), ext_bits(answer.strip()), flags.strip(), rounding_mode)
dest_file.write(translation + "\n")
else:
# print("read false")
reading = False
else:
while reading:
# get answer and flags from Ref...signature
answer = src_file2.readline()
#print(answer)
packed = src_file2.readline()[6:]
#print("Packed: ", packed)
if len(packed.strip())>0: # if there is still stuff to read
# print("packed")
# parse through .S file
detected = False
done = False
op1val = "0"
op2val = "0"
while not (detected or done):
# print("det1")
line = src_file1.readline()
# print(line)
if "op1val" in line:
# print("det2")
# parse line

# handle special case where destination register is hardwired to zero
if "dest:x0" in line:
answer = "x" * len(answer)

op1val = line.split("op1val")[1].split("x")[1].split(";")[0]
if "-" in line.split("op1val")[1].split("x")[0]: # neg sign handling
op1val = twos_comp(my_config.bits, op1val)
if my_config.op != "fsqrt": # sqrt doesn't have two input vals
op2val = line.split("op2val")[1].split("x")[1].strip()
if op2val[-1] == ";": op2val = op2val[:-1] # remove ; if it's there
if "-" in line.split("op2val")[1].split("x")[0]: # neg sign handling
op2val = twos_comp(my_config.bits, op2val)
# go to next test in vector
detected = True
elif "RVTEST_CODE_END" in line:
done = True
# rounding mode for float
if not done and (my_config.op == "fsqrt" or my_config.op == "fdiv"):
src_file2.readline() #skip first bc junk
# print(my_config.bits, my_config.letter)
if my_config.letter == "F" and my_config.bits == 64:
reading = True
# print("trigger 64F")
#skip first 2 lines bc junk
src_file2.readline()
while reading:
# get answer and flags from Ref...signature
# answers are before deadbeef (first line of 4)
# flags are after deadbeef (third line of 4)
answer = src_file2.readline().strip()
deadbeef = src_file2.readline().strip()
# print(answer)
if not (answer == "e7d4b281" and deadbeef == "6f5ca309"): # if there is still stuff to read
# get flags
packed = src_file2.readline().strip()[6:]
flags, rounding_mode = unpack_rf(packed)

# put it all together
if not done:
translation = "{}_{}_{}_{}_{}_{}".format(operation, ext_bits(op1val), ext_bits(op2val), ext_bits(answer.strip()), flags, rounding_mode)
dest_file.write(translation + "\n")
else:
# print("read false")
reading = False
# print("out")
dest_file.close()
src_file1.close()
src_file2.close()
# skip 00000000 buffer
src_file2.readline()

# parse through .S file
detected = False
done = False
op1val = "0"
op2val = "0"
while not (detected or done):
# print("det1")
line = src_file1.readline()
# print(line)
if "op1val" in line:
# print("det2")
# parse line

# handle special case where destination register is hardwired to zero
if "dest:x0" in line:
answer = "x" * len(answer)

op1val = line.split("op1val")[1].split("x")[1].split(";")[0]
if my_config.op != "fsqrt": # sqrt doesn't have two input vals
op2val = line.split("op2val")[1].split("x")[1].strip()
if op2val[-1] == ";": op2val = op2val[:-1] # remove ; if it's there
else:
op2val = 32*"X"
# go to next test in vector
detected = True
elif "RVTEST_CODE_END" in line:
done = True
# put it all together
if not done:
translation = f"{operation}_{ext_bits(op1val)}_{ext_bits(op2val)}_{ext_bits(answer.strip())}_{flags}_{rounding_mode}"
dest_file.write(translation + "\n")
else:
# print("read false")
reading = False
elif my_config.letter == "M" and my_config.bits == 64:
reading = True
#skip first 2 lines bc junk
src_file2.readline()
while reading:
# print("trigger 64M")
# get answer from Ref...signature
# answers span two lines and are reversed
answer2 = src_file2.readline().strip()
answer1 = src_file2.readline().strip()
answer = answer1 + answer2
#print(answer1,answer2)
if not (answer2 == "e7d4b281" and answer1 == "6f5ca309"): # if there is still stuff to read
# parse through .S file
detected = False
done = False
op1val = "0"
op2val = "0"
while not (detected or done):
# print("det1")
line = src_file1.readline()
# print(line)
if "op1val" in line:
# print("det2")
# parse line
# handle special case where destination register is hardwired to zero
if "dest:x0" in line:
answer = "x" * len(answer)
op1val = line.split("op1val")[1].split("x")[1].split(";")[0]
if "-" in line.split("op1val")[1].split("x")[0]: # neg sign handling
op1val = twos_comp(my_config.bits, op1val)
if my_config.op != "fsqrt": # sqrt doesn't have two input vals, unnec here but keeping for later
op2val = line.split("op2val")[1].split("x")[1].strip()
if op2val[-1] == ";": op2val = op2val[:-1] # remove ; if it's there
if "-" in line.split("op2val")[1].split("x")[0]: # neg sign handling
op2val = twos_comp(my_config.bits, op2val)
# go to next test in vector
detected = True
elif "RVTEST_CODE_END" in line:
done = True
# ints don't have flags
flags = "XX"
# put it all together
if not done:
translation = f"{operation}_{ext_bits(op1val)}_{ext_bits(op2val)}_{ext_bits(answer.strip())}_{flags.strip()}_{rounding_mode}"
dest_file.write(translation + "\n")
else:
# print("read false")
reading = False
elif my_config.letter == "M" and my_config.bits == 32:
reading = True
while reading:
# print("trigger 64M")
# get answer from Ref...signature
# answers span two lines and are reversed
answer = src_file2.readline().strip()
# print(f"Answer: {answer}")
#print(answer1,answer2)
if answer != '6f5ca309': # if there is still stuff to read
# parse through .S file
detected = False
done = False
op1val = "0"
op2val = "0"
while not (detected or done):
# print("det1")
line = src_file1.readline()
# print(line)
if "op1val" in line:
# print("det2")
# parse line
# handle special case where destination register is hardwired to zero
if "dest:x0" in line:
answer = "x" * len(answer)
op1val = line.split("op1val")[1].split("x")[1].split(";")[0]
if "-" in line.split("op1val")[1].split("x")[0]: # neg sign handling
op1val = line.split("op1val")[1].split("x")[1].split(";")[0]
if "-" in line.split("op1val")[1].split("x")[0]: # neg sign handling
op1val = twos_comp(my_config.bits, op1val)
if my_config.op != "fsqrt": # sqrt doesn't have two input vals, unnec here but keeping for later
op2val = line.split("op2val")[1].split("x")[1].strip()
if op2val[-1] == ";": op2val = op2val[:-1] # remove ; if it's there
if "-" in line.split("op2val")[1].split("x")[0]: # neg sign handling
op2val = twos_comp(my_config.bits, op2val)
# go to next test in vector
detected = True
elif "RVTEST_CODE_END" in line:
done = True
# ints don't have flags
flags = "XX"
# put it all together
if not done:
translation = f"{operation}_{ext_bits(op1val)}_{ext_bits(op2val)}_{ext_bits(answer.strip())}_{flags.strip()}_{rounding_mode}"
dest_file.write(translation + "\n")
else:
# print("read false")
reading = False
else:
while reading:
# get answer and flags from Ref...signature
answer = src_file2.readline()
#print(answer)
packed = src_file2.readline()[6:]
#print("Packed: ", packed)
if len(packed.strip())>0: # if there is still stuff to read
# print("packed")
# parse through .S file
detected = False
done = False
op1val = "0"
op2val = "0"
while not (detected or done):
# print("det1")
line = src_file1.readline()
# print(line)
if "op1val" in line:
# print("det2")
# parse line

# handle special case where destination register is hardwired to zero
if "dest:x0" in line:
answer = "x" * len(answer)

op1val = line.split("op1val")[1].split("x")[1].split(";")[0]
if "-" in line.split("op1val")[1].split("x")[0]: # neg sign handling
op1val = twos_comp(my_config.bits, op1val)
if my_config.op != "fsqrt": # sqrt doesn't have two input vals
op2val = line.split("op2val")[1].split("x")[1].strip()
if op2val[-1] == ";": op2val = op2val[:-1] # remove ; if it's there
if "-" in line.split("op2val")[1].split("x")[0]: # neg sign handling
op2val = twos_comp(my_config.bits, op2val)
# go to next test in vector
detected = True
elif "RVTEST_CODE_END" in line:
done = True
# rounding mode for float
if not done and (my_config.op == "fsqrt" or my_config.op == "fdiv"):
flags, rounding_mode = unpack_rf(packed)

# put it all together
if not done:
translation = f"{operation}_{ext_bits(op1val)}_{ext_bits(op2val)}_{ext_bits(answer.strip())}_{flags}_{rounding_mode}"
dest_file.write(translation + "\n")
else:
# print("read false")
reading = False
# print("out")

config_list = [
Config(32, "M", "div", "div-", 0),
@ -309,4 +303,4 @@ Config(64, "M", "remuw", "remuw-", 9)
]

for c in config_list:
    create_vectors(c)
    create_vectors(c)
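For reference, a small self-contained sketch of the negative-operand handling used in the vector generator above. The `twos_comp` helper below is a hypothetical stand-in for the script's function of the same name, written only to match how it is called, and the sample line only imitates the style of the test macros:

```python
def twos_comp(bits, hex_str):
    # Hypothetical equivalent of the script's helper: treat hex_str as a magnitude
    # that was written with a leading '-', and return its bits-wide two's-complement
    # encoding as a hex string.
    val = (1 << bits) - int(hex_str, 16)
    return f"{val:x}"

# Example line in the style of the riscv-arch-test operand annotations
line = "TEST_RR_OP(..., op1val:-0x1; op2val:0x3, ...)"

op1val = line.split("op1val")[1].split("x")[1].split(";")[0]   # -> "1"
if "-" in line.split("op1val")[1].split("x")[0]:               # the '-' sits just before the '0x'
    op1val = twos_comp(64, op1val)                             # -> "ffffffffffffffff" for a 64-bit config

print(op1val)
```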
@ -27,8 +27,8 @@ round_dict = {

print("creating testfloat div test vectors")

source_dir = "{}/tests/fp/vectors/".format(wally)
dest_dir = "{}/tests/fp/combined_IF_vectors/IF_vectors/".format(wally)
source_dir = f"{wally}/tests/fp/vectors/"
dest_dir = f"{wally}/tests/fp/combined_IF_vectors/IF_vectors/"
all_vectors = os.listdir(source_dir)

div_vectors = [v for v in all_vectors if "div" in v]
@ -39,19 +39,15 @@ for vector in div_vectors:
    config_list = vector.split(".")[0].split("_")
    operation = "1" #float div
    rounding_mode = round_dict[str(config_list[2])]
    # use name to create our new tv
    dest_file = open(dest_dir + "cvw_" + vector, 'a')
    # open vector
    src_file = open(source_dir + vector,'r')
    # for each test in the vector
    for i in src_file.readlines():
        translation = "" # this stores the test that we are currently working on
        [input_1, input_2, answer, flags] = i.split("_") # separate inputs, answer, and flags
        # put it all together, strip nec for removing \n on the end of the flags
        translation = "{}_{}_{}_{}_{}_{}".format(operation, ext_bits(input_1), ext_bits(input_2), ext_bits(answer), flags.strip(), rounding_mode)
        dest_file.write(translation + "\n")
    dest_file.close()
    src_file.close()
    # use name to create our new tv and open vector
    with open(dest_dir + "cvw_" + vector, 'a') as dest_file, open(source_dir + vector) as src_file:
        # for each test in the vector
        for i in src_file.readlines():
            translation = "" # this stores the test that we are currently working on
            [input_1, input_2, answer, flags] = i.split("_") # separate inputs, answer, and flags
            # put it all together, strip nec for removing \n on the end of the flags
            translation = f"{operation}_{ext_bits(input_1)}_{ext_bits(input_2)}_{ext_bits(answer)}_{flags.strip()}_{rounding_mode}"
            dest_file.write(translation + "\n")


print("creating testfloat sqrt test vectors")
@ -64,16 +60,12 @@ for vector in sqrt_vectors:
    config_list = vector.split(".")[0].split("_")
    operation = "2" #sqrt
    rounding_mode = round_dict[str(config_list[2])]
    # use name to create our new tv
    dest_file = open(dest_dir + "cvw_" + vector, 'a')
    # open vector
    src_file = open(source_dir + vector,'r')
    # for each test in the vector
    for i in src_file.readlines():
        translation = "" # this stores the test that we are currently working on
        [input_1, answer, flags] = i.split("_") # separate inputs, answer, and flags
        # put it all together, strip nec for removing \n on the end of the flags
        translation = "{}_{}_{}_{}_{}_{}".format(operation, ext_bits(input_1), "X"*32, ext_bits(answer), flags.strip(), rounding_mode)
        dest_file.write(translation + "\n")
    dest_file.close()
    src_file.close()
    # use name to create our new tv and open vector
    with open(dest_dir + "cvw_" + vector, 'a') as dest_file, open(source_dir + vector) as src_file:
        # for each test in the vector
        for i in src_file.readlines():
            translation = "" # this stores the test that we are currently working on
            [input_1, answer, flags] = i.split("_") # separate inputs, answer, and flags
            # put it all together, strip nec for removing \n on the end of the flags
            translation = "{}_{}_{}_{}_{}_{}".format(operation, ext_bits(input_1), "X"*32, ext_bits(answer), flags.strip(), rounding_mode)
            dest_file.write(translation + "\n")
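A quick illustration of the underscore-separated line format these scripts assemble (operation, operand 1, operand 2, answer, flags, rounding mode). The `ext_bits` function below is only a stand-in whose padding behavior is assumed for this sketch, the rounding-mode value is a placeholder for whatever `round_dict` supplies, and the operand values are made up:

```python
# Stand-in for the script's ext_bits helper, assumed here to left-pad a hex field
# with 'X' up to 32 characters
def ext_bits(field, width=32):
    return field.strip().rjust(width, "X")

operation = "1"        # "1" encodes float divide in these combined vectors
rounding_mode = "0"    # placeholder for a round_dict value
input_1, input_2, answer, flags = "3f800000", "40000000", "3f000000", "00"

# operation _ op1 _ op2 _ answer _ flags _ rounding mode
line = f"{operation}_{ext_bits(input_1)}_{ext_bits(input_2)}_{ext_bits(answer)}_{flags.strip()}_{rounding_mode}"
print(line)
```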
@ -1,2 +1,2 @@
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
__path__ = extend_path(__path__, __name__)
@ -1,17 +1,10 @@
import os
import re
import shutil
import subprocess
import shlex
import logging
import random
import string
from string import Template

import riscof.utils as utils
from riscof.pluginTemplate import pluginTemplate
import riscof.constants as constants
from riscv_isac.isac import isac

logger = logging.getLogger()

@ -28,8 +21,8 @@ class sail_cSim(pluginTemplate):
            raise SystemExit(1)
        self.num_jobs = str(config['jobs'] if 'jobs' in config else 1)
        self.pluginpath = os.path.abspath(config['pluginpath'])
        self.sail_exe = { '32' : os.path.join(config['PATH'] if 'PATH' in config else "","riscv_sim_RV32"),
                          '64' : os.path.join(config['PATH'] if 'PATH' in config else "","riscv_sim_RV64")}
        self.sail_exe = { '32' : os.path.join(config['PATH'] if 'PATH' in config else "","riscv_sim_rv32d"),
                          '64' : os.path.join(config['PATH'] if 'PATH' in config else "","riscv_sim_rv64d")}
        self.isa_spec = os.path.abspath(config['ispec']) if 'ispec' in config else ''
        self.platform_spec = os.path.abspath(config['pspec']) if 'ispec' in config else ''
        self.make = config['make'] if 'make' in config else 'make'
@ -72,11 +65,11 @@ class sail_cSim(pluginTemplate):
            self.sailargs += "--enable-zcb"
        if "Q" in ispec["ISA"]:
            self.isa += 'q'
        objdump = "riscv64-unknown-elf-objdump".format(self.xlen)
        objdump = "riscv64-unknown-elf-objdump"
        if shutil.which(objdump) is None:
            logger.error(objdump+": executable not found. Please check environment setup.")
            raise SystemExit(1)
        compiler = "riscv64-unknown-elf-gcc".format(self.xlen)
        compiler = "riscv64-unknown-elf-gcc"
        if shutil.which(compiler) is None:
            logger.error(compiler+": executable not found. Please check environment setup.")
            raise SystemExit(1)
@ -114,9 +107,9 @@ class sail_cSim(pluginTemplate):
        if ('NO_SAIL=True' in testentry['macros']):
            # if the tests can't run on SAIL we copy the reference output to the src directory
            reference_output = re.sub("/src/","/references/", re.sub(".S",".reference_output", test))
            execute += 'cut -c-{0:g} {1} > {2}'.format(8, reference_output, sig_file) #use cut to remove comments when copying
            execute += f'cut -c-{8:g} {reference_output} > {sig_file}' #use cut to remove comments when copying
        else:
            execute += self.sail_exe[self.xlen] + ' -z268435455 -i --trace=step ' + self.sailargs + ' --test-signature={0} {1} > {2}.log 2>&1;'.format(sig_file, elf, test_name)
            execute += self.sail_exe[self.xlen] + ' -z268435455 -i --trace=step ' + self.sailargs + f' --test-signature={sig_file} {elf} > {test_name}.log 2>&1;'

        cov_str = ' '
        for label in testentry['coverage_labels']:
@ -124,10 +117,10 @@ class sail_cSim(pluginTemplate):

        if cgf_file is not None:
            coverage_cmd = 'riscv_isac --verbose info coverage -d \
                    -t {0}.log --parser-name c_sail -o coverage.rpt \
                    -t {}.log --parser-name c_sail -o coverage.rpt \
                    --sig-label begin_signature end_signature \
                    --test-label rvtest_code_begin rvtest_code_end \
                    -e ref.elf -c {1} -x{2} {3};'.format(\
                    -e ref.elf -c {} -x{} {};'.format(\
                    test_name, ' -c '.join(cgf_file), self.xlen, cov_str)
        else:
            coverage_cmd = ''
@ -1,16 +1,8 @@
import os
import re
import shutil
import subprocess
import shlex
import logging
import random
import string
from string import Template
import sys

import riscof.utils as utils
import riscof.constants as constants
from riscof.pluginTemplate import pluginTemplate

logger = logging.getLogger()
@ -194,22 +186,22 @@ class spike(pluginTemplate):
        # cmd = self.compile_cmd.format(testentry['isa'].lower().replace('zicsr', ' ', 2), self.xlen, test, elf, compile_macros)
        cmd = self.compile_cmd.format(testentry['isa'].lower(), self.xlen, test, elf, compile_macros)

        # if the user wants to disable running the tests and only compile the tests, then
        # the "else" clause is executed below assigning the sim command to simple no action
        # echo statement.
        # if the user wants to disable running the tests and only compile the tests, then
        # the "else" clause is executed below assigning the sim command to simple no action
        # echo statement.
        if self.target_run:
            # set up the simulation command. Template is for spike. Please change.
            if ('NO_SAIL=True' in testentry['macros']):
                # if the tests can't run on SAIL we copy the reference output to the src directory
                reference_output = re.sub("/src/","/references/", re.sub(".S",".reference_output", test))
                simcmd = 'cut -c-{0:g} {1} > {2}'.format(8, reference_output, sig_file) #use cut to remove comments when copying
                simcmd = f'cut -c-{8:g} {reference_output} > {sig_file}' #use cut to remove comments when copying
            else:
                simcmd = self.dut_exe + ' --isa={0} +signature={1} +signature-granularity=4 {2}'.format(self.isa, sig_file, elf)
                simcmd = self.dut_exe + f' --isa={self.isa} +signature={sig_file} +signature-granularity=4 {elf}'
        else:
            simcmd = 'echo "NO RUN"'

        # concatenate all commands that need to be executed within a make-target.
        execute = '@cd {0}; {1}; {2};'.format(testentry['work_dir'], cmd, simcmd)
        execute = '@cd {}; {}; {};'.format(testentry['work_dir'], cmd, simcmd)

        # create a target. The makeutil will create a target with the name "TARGET<num>" where num
        # starts from 0 and increments automatically for each new target that is added
@ -17,7 +17,7 @@ if __name__ == "__main__":
    line_num = int(sig_adr / 4) + 1
    offset = sig_adr & 0x3F
    test_num = int((sig_adr-offset)/int("40",16))
    print("IntrNum 0x{:02X}".format(test_num))
    print("Offset 0x{:02X}".format(offset))
    print(f"IntrNum 0x{test_num:02X}")
    print(f"Offset 0x{offset:02X}")
    print("LineNum "+str(line_num))

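A worked example of the signature-address arithmetic above, using a made-up address purely for illustration:

```python
sig_adr = 0x9C                      # hypothetical signature address

line_num = int(sig_adr / 4) + 1     # each signature line holds 4 bytes -> line 40
offset   = sig_adr & 0x3F           # position within a 64-byte test block -> 0x1C
test_num = int((sig_adr - offset) / int("40", 16))  # which 64-byte block -> 2

print(f"IntrNum 0x{test_num:02X}")  # IntrNum 0x02
print(f"Offset 0x{offset:02X}")     # Offset 0x1C
print("LineNum " + str(line_num))   # LineNum 40
```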