Skip to content
Snippets Groups Projects
Verified Commit 31734fa9 authored by Sebastien Michelland's avatar Sebastien Michelland
Browse files

campaign summary script + first version of performance tests

parent 011d683a
No related branches found
No related tags found
No related merge requests found
......@@ -84,7 +84,8 @@ RUN cd gem5 && \
# Copy test files
COPY mibench mibench/
COPY riscv_cc_REF riscv_cc_FSH riscv_qemu_FSH \
elf32lriscv_ref.x elf32lriscv_ccs.x fault.py Makefile .
elf32lriscv_ref.x elf32lriscv_ccs.x \
fault.py summary.py gem5script.py Makefile .
# Squash the final image so we don't ship source and build files as diffs
FROM scratch
......
......@@ -22,11 +22,11 @@ all_REF run_REF all_FSH run_FSH clean: %:
@ $(MAKE) -C mibench/security/sha $*
distclean: clean
@ rm -rf $(OUT)/
@ rm -rf out/
.PHONY: all_REF run_REF all_FSH run_FSH clean distclean
OUT := out
OUT := out/campaigns
CAMPAIGNS_REF := ref-ex-s32-1 ref-ex-s32-2 ref-ex-sar32
CAMPAIGNS_FSH := fsh-ex-s32-1 fsh-ex-s32-2 fsh-ex-sar32 fsh-multi-random
......@@ -95,5 +95,29 @@ campaigns:
$(OUT)/:
@ mkdir -p $@
.PHONY: campaigns%
simulations:
# Generate a simulation rule
# $1: Program name
# $2: Cache setting ("nocache" or "icache")
# $3: Program kind ("REF" or "FSH")
# TODO: Avoid the .PHONY
# Template rule, instantiated once per (program, cache setting, kind) via
# $(eval $(call ...)) below. $$@ is escaped so that $@ expands when the
# generated recipe runs, not when the template itself is expanded.
define simulation_rule
out/m5out/$1_$2_$3:
	@ mkdir -p out/m5out
	./prefix/bin/gem5.opt -r -d "$$@" ./gem5script.py "$$@"
# NOTE(review): marking a real output directory .PHONY forces a re-run on
# every invocation -- see the TODO above about removing this.
.PHONY: out/m5out/$1_$2_$3
simulations: out/m5out/$1_$2_$3
endef
# Generate simulation rules for all programs
$(foreach P,$(PROGRAMS),$(eval \
$(call simulation_rule,$(notdir $P),nocache,REF) \
$(call simulation_rule,$(notdir $P),nocache,FSH) \
$(call simulation_rule,$(notdir $P),icache,REF) \
$(call simulation_rule,$(notdir $P),icache,FSH)))
.PHONY: campaigns% simulations
.PRECIOUS: $(OUT)/
......@@ -31,6 +31,7 @@ TODO: Link to Zenodo.
root@(container):~# make all_REF all_FSH run_REF run_FSH
root@(container):~# make -j$(nproc) campaigns
root@(container):~# TODO
# make -j$(nproc) simulations
```
The Docker image is just a build of this repository on Ubuntu 22.04; see [Detailed description](#detailed-description) for an explanation of the contents. To build natively without using Docker, please check the [Manual build](#manual-build) instructions and the [Dockerfile](Dockerfile) as a reference.
......@@ -67,7 +68,7 @@ Other files used in the testing process include:
- `mibench`: Programs from the [MiBench benchmark suite](https://vhosts.eecs.umich.edu/mibench/index.html). We target the Industrial, Network and Security applications. The source files are original but the Makefiles are basically new.
- `riscv_qemu_REF`, `riscv_qemu_FSH`: Wrappers around QEMU and QEMU-with-FSH-support.
- `fault.py`: Script for running fault injection campaigns (details inside).
- `fault_summary.py`: TODO.
- `summary.py`: Script for aggregating security and performance test results.
- TODO: Generating figures.
The Makefile just contains a few top-level commands for using the project.
......
Subproject commit 7ab0b059258be4e81db83b4af47855739df38335
Subproject commit 762dd62acf689fee74ce204a8379971971a5e311
# From tutorial:
# <https://www.gem5.org/documentation/learning_gem5/part1/simple_config/>
# with basically no changes
import m5
import sys
from m5.objects import *
# Because most of our programs are small, use a 4 * 2kB instruction cache
# (associativity will matter because dual .text/.text_ccs sections with each
# their own alignment)
class ICache(Cache):
    """L1 instruction cache parameters for the gem5 classic cache model.

    8 kB / 4-way, i.e. the "4 * 2kB" sizing described in the comment above:
    associativity matters because hardened binaries carry dual
    .text/.text_ccs sections, each with its own alignment.
    """
    size = "8kB"
    assoc = 4
    tag_latency = 2
    data_latency = 2
    response_latency = 2
    mshrs = 4            # outstanding misses tracked
    tgts_per_mshr = 20   # accesses coalesced per outstanding miss

    def connectCPU(self, cpu):
        # CPU side of the cache attaches to the CPU's instruction port.
        self.cpu_side = cpu.icache_port

    def connectBus(self, bus):
        # Memory side of the cache attaches to the crossbar.
        self.mem_side = bus.cpu_side_ports
class DCache(Cache):
    """L1 data cache parameters for the gem5 classic cache model.

    32 kB / 4-way; latencies mirror ICache so the two caches are
    directly comparable in the timing results.
    """
    size = "32kB"
    assoc = 4
    tag_latency = 2
    data_latency = 2
    response_latency = 2
    mshrs = 4            # outstanding misses tracked
    tgts_per_mshr = 20   # accesses coalesced per outstanding miss

    def connectCPU(self, cpu):
        # CPU side of the cache attaches to the CPU's data port.
        self.cpu_side = cpu.dcache_port

    def connectBus(self, bus):
        # Memory side of the cache attaches to the crossbar.
        self.mem_side = bus.cpu_side_ports
def run(binary, cmd, cache=False):
    """Simulate `binary` on a timing RISC-V CPU in Syscall Emulation mode.

    binary -- path to the RISC-V ELF to execute
    cmd    -- full argv for the simulated process (cmd[0] is the binary)
    cache  -- when True, insert the ICache/DCache pair defined above;
              when False, wire the CPU ports straight to the memory bus
    """
    sim = System()

    # Source clock: 1 GHz
    sim.clk_domain = SrcClockDomain()
    sim.clk_domain.clock = "1GHz"
    sim.clk_domain.voltage_domain = VoltageDomain()

    # Timing memory mode, 512 MB of RAM
    sim.mem_mode = "timing"
    sim.mem_ranges = [AddrRange("512MB")]

    # Simple timing CPU with a single crossbar
    sim.cpu = RiscvTimingSimpleCPU()
    sim.membus = SystemXBar()

    if cache:
        # L1 caches sit between the CPU ports and the crossbar
        sim.cpu.icache = ICache()
        sim.cpu.icache.connectCPU(sim.cpu)
        sim.cpu.icache.connectBus(sim.membus)
        sim.cpu.dcache = DCache()
        sim.cpu.dcache.connectCPU(sim.cpu)
        sim.cpu.dcache.connectBus(sim.membus)
    else:
        # No caches: both ports go straight to the crossbar
        sim.cpu.icache_port = sim.membus.cpu_side_ports
        sim.cpu.dcache_port = sim.membus.cpu_side_ports

    # Trivial interrupt controller plus the functional system port
    sim.cpu.createInterruptController()
    sim.system_port = sim.membus.cpu_side_ports

    # One DDR3 controller services the whole address range
    sim.mem_ctrl = MemCtrl()
    sim.mem_ctrl.dram = DDR3_1600_8x8()
    sim.mem_ctrl.dram.range = sim.mem_ranges[0]
    sim.mem_ctrl.port = sim.membus.mem_side_ports

    # Syscall Emulation (SE) workload for the target binary
    sim.workload = SEWorkload.init_compatible(binary)
    proc = Process()
    proc.cmd = cmd
    sim.cpu.workload = proc
    sim.cpu.createThreads()

    # Root the hierarchy, instantiate C++ objects, and simulate
    root = Root(full_system=False, system=sim)
    m5.instantiate()
    print("Beginning simulation!")
    event = m5.simulate()
    print('Exiting @ tick {} because {}'
          .format(m5.curTick(), event.getCause()))
# Short program name -> MiBench binary path. The "_REF"/"_FSH" suffix is
# appended at run time (see main() below), so each entry covers both the
# reference and the hardened build.
EXECUTABLES = {
    "basicmath": "mibench/automotive/basicmath/basicmath_small",
    "bitcount": "mibench/automotive/bitcount/bitcnts",
    "qsort": "mibench/automotive/qsort/qsort_small",
    "susan": "mibench/automotive/susan/susan",
    "dijkstra": "mibench/network/dijkstra/dijkstra_small",
    # "patricia": "mibench/network/patricia/patricia",
    "blowfish": "mibench/security/blowfish/bf",
    "rijndael": "mibench/security/rijndael/rijndael",
    "sha": "mibench/security/sha/sha",
}

# Fixed hex-string keys passed on the blowfish/rijndael command lines
BLOWFISH_KEY = "1234567890abcdeffedcba0987654321"
AES_KEY = "1234567890abcdeffedcba09876543211234567890abcdeffedcba0987654321"

# Command-line arguments per program (inputs read from the mibench tree,
# outputs written under out/m5out/)
CLI_ARGS = {
    "basicmath": [],
    "bitcount": ["10000"],
    "qsort": ["mibench/automotive/qsort/input_small.dat"],
    "susan": ["mibench/automotive/susan/input_small.pgm",
              "out/m5out/susan_output_small.pgm",
              "-s"],
    "dijkstra": ["mibench/network/dijkstra/input.dat"],
    "patricia": ["mibench/network/patricia/small.udp"],
    "blowfish": ["e",
                 "mibench/security/blowfish/input_small.asc",
                 "out/m5out/blowfish_output_small.enc",
                 BLOWFISH_KEY],
    "rijndael": ["mibench/security/rijndael/input_small.asc",
                 "out/m5out/rijndael_output_small.enc",
                 "e",
                 AES_KEY],
    "sha": ["mibench/security/sha/input_small.asc"],
}
def main(output):
    """Run the gem5 simulation described by an output-directory path.

    output: path of the form ".../{name}_{config}_{ctgy}" where name is a
    key of EXECUTABLES, config is "nocache" or "icache", and ctgy is
    "REF" or "FSH". Unknown program names are skipped (returns 0) so a
    partial program set does not break the Makefile's `simulations` goal.

    Fixes over the original: `os` was used without ever being imported
    (NameError at runtime); the unused `outfolder` local is dropped; and
    rsplit tolerates underscores inside the program name itself.
    """
    import os  # local import: the module header only imports m5/sys

    name, config, ctgy = os.path.basename(output).rsplit("_", 2)
    assert ctgy in ("REF", "FSH")
    cache = (config == "icache")  # the other option is "nocache"

    if name not in EXECUTABLES:
        print(f"skipping {name}")
        return 0

    exe = EXECUTABLES[name] + "_" + ctgy
    print("[command line]", exe, CLI_ARGS[name])
    run(exe, [exe] + CLI_ARGS[name], cache)
    return 0

main(*sys.argv[1:])
#! /usr/bin/env python3
from datetime import datetime
import elftools.elf.elffile
import subprocess
import glob
import re
import os
# List of programs as (folder, reference binary, hardened binary) triplets
ALL_PROGRAMS = [
("mibench/automotive/basicmath",
"basicmath_small_REF", "basicmath_small_FSH"),
("mibench/automotive/bitcount",
"bitcnts_REF", "bitcnts_FSH"),
("mibench/automotive/qsort",
"qsort_small_REF", "qsort_small_FSH"),
("mibench/automotive/susan",
"susan_REF", "susan_FSH"),
("mibench/network/dijkstra",
"dijkstra_small_REF", "dijkstra_small_FSH"),
("mibench/network/patricia",
"patricia_REF", "patricia_FSH"),
("mibench/security/blowfish",
"bf_REF", "bf_FSH"),
("mibench/security/rijndael",
"rijndael_REF", "rijndael_FSH"),
("mibench/security/sha",
"sha_REF", "sha_FSH"),
]
# List of campaigns
ALL_CAMPAIGNS = [
"fsh-ex-s32-1",
"fsh-ex-s32-2",
"fsh-ex-sar32",
"fsh-multi-random",
"ref-ex-s32-1",
"ref-ex-s32-2",
"ref-ex-sar32",
]
# Work folder
WORK_FOLDER = "out/"
# Header for the auto-generated README file
README_HEADER = """
------------------------------
GENERATED FILES - DO NOT EDIT!
------------------------------
These are fetch skips hardening test results.
Generated by {USER}@{HOSTNAME} on {NOW}.
campaigns/*.txt -- generated by fault.py:
Raw results of fault injection campaigns (might be partial), and information
about PCs not reached by test executions (*-notreached.txt).
campaigns.csv -- generated by summary.py:
Summary of all above results of campaigns in CSV format.
size.csv -- generated by summary.py:
Variation in program size between reference and hardened version.
TODO: Performance simulations.
""".strip()
# Short display name of program #i: the basename of its MiBench folder.
def program_name(i):
    folder = ALL_PROGRAMS[i][0]
    return os.path.basename(folder)
# Path of the fault-injection log for program #i under the given campaign.
def file_campaign_output(i, campaign):
    prog = program_name(i)
    basename = f"{prog}-campaign-{campaign}.txt"
    return os.path.join(WORK_FOLDER, "campaigns", basename)
def get_text_and_text_ccs_size(path):
    """Return (.text size, .text_ccs size) in bytes for the ELF at `path`.

    The .text_ccs section may be absent (presumably in non-hardened
    binaries -- its size is then reported as 0).

    Fix over the original: the file object is now closed deterministically
    via a with-block (it previously leaked until garbage collection).
    """
    with open(path, "rb") as fp:
        elf = elftools.elf.elffile.ELFFile(fp)
        # Read both sizes before the underlying file is closed.
        text = elf.get_section_by_name(".text").data_size
        ccs_section = elf.get_section_by_name(".text_ccs")
        text_ccs = 0 if ccs_section is None else ccs_section.data_size
    return (text, text_ccs)
def fault_log_to_csv(path):
    """Parse one fault-campaign log into a {field: value} dict.

    Expected layout: line 1 is a progress marker (starts with "=" when the
    campaign finished), line 2 a CSV header, line 3 the CSV values. The
    bookkeeping "setting" field is stripped from the result.

    Returns None for unfinished campaigns. Fixes over the original:
    empty or truncated files (fewer than 3 lines, or an empty first line)
    no longer raise IndexError/ValueError -- they are treated as
    unfinished; a missing "setting" column no longer raises KeyError.
    """
    with open(path, "r") as fp:
        lines = fp.read().split("\n", 4)
    if len(lines) < 3 or not lines[0].startswith("="):
        print(f"{path} is not finished, ignoring")
        return None
    csv = dict(zip(lines[1].split(","), lines[2].split(",")))
    csv.pop("setting", None)
    return csv
# Entry point: aggregate fault-campaign logs and binary sizes into CSV
# summaries, plus a README recording how and when the data was produced.
def main():
    # Read fields from all campaign result files; unfinished campaigns
    # come back as None from fault_log_to_csv() and are skipped.
    results = dict()
    for i in range(len(ALL_PROGRAMS)):
        name = program_name(i)
        for campaign in ALL_CAMPAIGNS:
            csv = fault_log_to_csv(file_campaign_output(i, campaign))
            if csv is not None:
                results[(name, campaign)] = csv

    # Full set of fields found in CSV files, sorted for a stable column
    # order. NOTE(review): set.union(*...) raises TypeError when *no*
    # campaign is finished (results empty) -- confirm that is acceptable.
    all_fields = set.union(*[set(csv.keys()) for _, csv in results.items()])
    all_fields = sorted(all_fields)

    # Generate a single large table: one row per (program, campaign);
    # fields missing from a given log default to "0".
    with open(os.path.join(WORK_FOLDER, "campaigns.csv"), "w") as fp:
        fp.write("program,campaign," + ",".join(all_fields) + "\n")
        for (name, campaign), values in results.items():
            fp.write(name + "," + campaign + ",")
            fp.write(",".join(values.get(f, "0") for f in all_fields))
            fp.write("\n")

    # Generate information about file sizes.
    # NOTE(review): despite the "size_REF,size_FSH" header, column 2 is
    # REF_text - FSH_text and column 3 is the FSH .text_ccs size --
    # confirm the intended semantics before relying on this CSV.
    with open(os.path.join(WORK_FOLDER, "size.csv"), "w") as fp:
        fp.write("program,size_REF,size_FSH\n")
        for i, (path, refBinary, fshBinary) in enumerate(ALL_PROGRAMS):
            refBinary = os.path.join(path, refBinary)
            fshBinary = os.path.join(path, fshBinary)
            REF_text, _ = get_text_and_text_ccs_size(refBinary)
            FSH_text, FSH_text_ccs = get_text_and_text_ccs_size(fshBinary)
            x = REF_text - FSH_text
            y = FSH_text_ccs
            fp.write(f"{program_name(i)},{x},{y}\n")

    # Generate a README file to remember some useful data
    with open(WORK_FOLDER + "/README", "w") as fp:
        fp.write(README_HEADER.format(
            USER=os.getenv("USER"),
            HOSTNAME=os.uname().nodename,
            NOW=str(datetime.now())) + "\n")

        # Record the commit of each sibling tool checkout (best effort:
        # failures are noted in the README and echoed to stdout).
        fp.write("\nTool versions:\n")
        for repo in ["llvm-property-preserving", "binutils-gdb", "qemu"]:
            cmd = ["git", "-C", repo, "rev-parse", "@"]
            proc = subprocess.run(cmd,
                stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            if proc.returncode == 0:
                # git's stdout already ends with a newline
                fp.write("- " + repo + ": " + proc.stdout.decode("utf-8"))
            else:
                fp.write("- " + repo + ": (could not obtain commit)\n")
                print(f"note: could not find commit for {repo}")

        # Record mtimes of every input binary and campaign log so results
        # can later be matched with the builds that produced them.
        fp.write("\nInput files:\n")
        def writemtime(fp, file):
            # One "- <file>: <mtime>" line per input file.
            dt = datetime.fromtimestamp(os.path.getmtime(file))
            fp.write("- " + file + ": " + str(dt) + "\n")
        for path, refBinary, fshBinary in ALL_PROGRAMS:
            writemtime(fp, os.path.join(path, refBinary))
            writemtime(fp, os.path.join(path, fshBinary))
        for i in range(len(ALL_PROGRAMS)):
            for campaign in ALL_CAMPAIGNS:
                co = file_campaign_output(i, campaign)
                writemtime(fp, co)

main()
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment