From 431744d97363534cf114e16184a0fc54cac901c0 Mon Sep 17 00:00:00 2001 From: Jerry Zhao Date: Mon, 1 Sep 2025 11:26:25 -0700 Subject: [PATCH] Add experimental chisel7 support --- .github/workflows/chipyard-chisel7.yml | 44 ++ build.sbt | 67 ++- common.mk | 53 +- generators/rocket-chip | 2 +- scripts/uniquify-module-names.py | 500 +++++++++++++++--- .../src/main/scala/ChipyardAnnotations.scala | 68 +++ .../src/main/scala/ChipyardCli.scala | 17 + .../src/main/scala/ChipyardOptions.scala | 40 ++ .../src/main/scala/ChipyardStage.scala | 60 +++ .../src/main/scala/StageUtils.scala | 32 ++ .../src/main/scala/package.scala | 24 + .../main/scala/phases/AddDefaultTests.scala | 52 ++ .../src/main/scala/phases/Checks.scala | 47 ++ .../main/scala/phases/GenerateArtefacts.scala | 26 + .../scala/phases/GenerateFirrtlAnnos.scala | 34 ++ .../phases/GenerateTestSuiteMakefrags.scala | 49 ++ .../main/scala/phases/PreElaboration.scala | 43 ++ .../src/main/scala/phases/PreservesAll.scala | 8 + .../scala/phases/TransformAnnotations.scala | 21 + variables.mk | 1 + 20 files changed, 1102 insertions(+), 86 deletions(-) create mode 100644 .github/workflows/chipyard-chisel7.yml create mode 100644 tools/stage-chisel7/src/main/scala/ChipyardAnnotations.scala create mode 100644 tools/stage-chisel7/src/main/scala/ChipyardCli.scala create mode 100644 tools/stage-chisel7/src/main/scala/ChipyardOptions.scala create mode 100644 tools/stage-chisel7/src/main/scala/ChipyardStage.scala create mode 100644 tools/stage-chisel7/src/main/scala/StageUtils.scala create mode 100644 tools/stage-chisel7/src/main/scala/package.scala create mode 100644 tools/stage-chisel7/src/main/scala/phases/AddDefaultTests.scala create mode 100644 tools/stage-chisel7/src/main/scala/phases/Checks.scala create mode 100644 tools/stage-chisel7/src/main/scala/phases/GenerateArtefacts.scala create mode 100644 tools/stage-chisel7/src/main/scala/phases/GenerateFirrtlAnnos.scala create mode 100644 tools/stage-chisel7/src/main/scala/phases/GenerateTestSuiteMakefrags.scala create mode 100644 tools/stage-chisel7/src/main/scala/phases/PreElaboration.scala create mode 100644 tools/stage-chisel7/src/main/scala/phases/PreservesAll.scala create mode 100644 tools/stage-chisel7/src/main/scala/phases/TransformAnnotations.scala diff --git a/.github/workflows/chipyard-chisel7.yml b/.github/workflows/chipyard-chisel7.yml new file mode 100644 index 0000000000..0f4ea828aa --- /dev/null +++ b/.github/workflows/chipyard-chisel7.yml @@ -0,0 +1,44 @@ +name: chipyard-chisel7 + +on: + pull_request: + branches: + - main + - '1.[0-9]*.x' + workflow_dispatch: + +defaults: + run: + shell: bash -leo pipefail {0} + +jobs: + chisel7-firrtl: + name: chisel7-firrtl + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Install DTC (device-tree-compiler) + run: | + sudo apt-get update + sudo apt-get install -y device-tree-compiler + + - name: Set up JDK 17 (Temurin) + uses: actions/setup-java@v4 + with: + distribution: 'temurin' + java-version: '17' + cache: 'sbt' + + - name: Init submodules (saturn only) + run: | + scripts/init-submodules-no-riscv-tools-nolog.sh --saturn + + - name: Run firrtl target with Chisel 7 + env: + USE_CHISEL7: "1" + run: | + cd sims/verilator + make firrtl + diff --git a/build.sbt b/build.sbt index 83ac3d622a..c4197aba77 100644 --- a/build.sbt +++ b/build.sbt @@ -1,6 +1,7 @@ import Tests._ val chisel6Version = "6.7.0" +val chisel7Version = "7.0.0-RC4" val chiselTestVersion = "6.0.0" val scalaVersionFromChisel = "2.13.16" @@ -92,12 
+93,17 @@ lazy val chisel6Settings = Seq( libraryDependencies ++= Seq("org.chipsalliance" %% "chisel" % chisel6Version), addCompilerPlugin("org.chipsalliance" % "chisel-plugin" % chisel6Version cross CrossVersion.full) ) +lazy val chisel7Settings = Seq( + libraryDependencies ++= Seq("org.chipsalliance" %% "chisel" % chisel7Version), + addCompilerPlugin("org.chipsalliance" % "chisel-plugin" % chisel7Version cross CrossVersion.full) +) lazy val chisel3Settings = Seq( libraryDependencies ++= Seq("edu.berkeley.cs" %% "chisel3" % chisel3Version), addCompilerPlugin("edu.berkeley.cs" % "chisel3-plugin" % chisel3Version cross CrossVersion.full) ) -lazy val chiselSettings = chisel6Settings ++ Seq( +// Select Chisel 7 when USE_CHISEL7 is set in the environment; default to Chisel 6. +lazy val chiselSettings = (if (sys.env.contains("USE_CHISEL7")) chisel7Settings else chisel6Settings) ++ Seq( libraryDependencies ++= Seq( "org.apache.commons" % "commons-lang3" % "3.12.0", "org.apache.commons" % "commons-text" % "1.9" @@ -115,11 +121,17 @@ lazy val scalaTestSettings = Seq( // -- Rocket Chip -- -lazy val hardfloat = freshProject("hardfloat", file("generators/hardfloat/hardfloat")) - .settings(chiselSettings) - .settings(commonSettings) - .dependsOn(midas_target_utils) - .settings(scalaTestSettings) +lazy val hardfloat = { + val useChisel7 = sys.env.contains("USE_CHISEL7") + var hf = freshProject("hardfloat", file("generators/hardfloat/hardfloat")) + .settings(chiselSettings) + .settings(commonSettings) + .settings(scalaTestSettings) + if (!useChisel7) { + hf = hf.dependsOn(midas_target_utils) + } + hf +} lazy val rocketMacros = (project in rocketChipDir / "macros") .settings(commonSettings) @@ -154,16 +166,43 @@ lazy val testchipip = withInitCheck((project in file("generators/testchipip")), .settings(commonSettings) lazy val chipyard = { + val useChisel7 = sys.env.contains("USE_CHISEL7") // Base chipyard project with always-on dependencies // Use explicit Project(...) 
so the project id remains 'chipyard' - var cy = Project(id = "chipyard", base = file("generators/chipyard")) - .dependsOn( + val baseProjects: Seq[ProjectReference] = + Seq( testchipip, rocketchip, boom, rocketchip_blocks, rocketchip_inclusive_cache, - dsptools, rocket_dsp_utils, icenet, tracegen, constellation, barf, shuttle, rerocc, - firrtl2_bridge - ) + ).map(sbt.Project.projectToRef) ++ + (if (useChisel7) Seq() else Seq(sbt.Project.projectToRef(firrtl2_bridge))) ++ + (if (useChisel7) Seq() else Seq(sbt.Project.projectToRef(dsptools), sbt.Project.projectToRef(rocket_dsp_utils))) + + val baseDeps: Seq[sbt.ClasspathDep[sbt.ProjectReference]] = + baseProjects.map(pr => sbt.ClasspathDependency(pr, None)) + + // Optional settings to exclude specific sources under Chisel 7 + val dspExcludeSettings: Seq[Def.Setting[_]] = if (useChisel7) Seq( + Compile / unmanagedSources := { + val files = (Compile / unmanagedSources).value + val root = (ThisBuild / baseDirectory).value + val excludeList = Seq( + // Directories or files relative to repo root + "generators/chipyard/src/main/scala/example/dsptools", + "generators/chipyard/src/main/scala/config/MMIOAcceleratorConfigs.scala", + "generators/chipyard/src/main/scala/config/TutorialConfigs.scala", + "generators/chipyard/src/main/scala/upf" + ).map(p => (root / p).getCanonicalFile) + val (excludeDirs, excludeFiles) = excludeList.partition(_.isDirectory) + files.filterNot { f => + val cf = f.getCanonicalFile + excludeFiles.contains(cf) || excludeDirs.exists(d => cf.toPath.startsWith(d.toPath)) + } + } + ) else Seq.empty + + var cy = Project(id = "chipyard", base = file("generators/chipyard")) + .dependsOn(baseDeps: _*) .settings(libraryDependencies ++= rocketLibDeps.value) .settings( libraryDependencies ++= Seq( @@ -171,7 +210,11 @@ lazy val chipyard = { ) ) .settings(commonSettings) - .settings(Compile / unmanagedSourceDirectories += file("tools/stage/src/main/scala")) + .settings(Compile / unmanagedSourceDirectories += { + if (useChisel7) file("tools/stage-chisel7/src/main/scala") + else file("tools/stage/src/main/scala") + }) + .settings(dspExcludeSettings: _*) // Optional modules discovered via initialized submodules (no env or manifest) val optionalModules: Seq[(String, ProjectReference)] = Seq( diff --git a/common.mk b/common.mk index 8de57f3b75..297b82aaa0 100644 --- a/common.mk +++ b/common.mk @@ -16,6 +16,21 @@ define require_cmd || { echo "Error: $(1) not found in PATH. Set up your tool environment before building this target." >&2; exit 1; } endef +# Require minimum firtool version when building with Chisel 7 +define require_firtool_version + @if [ -n "$(USE_CHISEL7)" ]; then \ + vline=`$(FIRTOOL_BIN) --version 2>/dev/null | grep -E 'CIRCT firtool-[0-9]+\.[0-9]+\.[0-9]+' | head -1`; \ + vstr=$${vline##*firtool-}; \ + if [ -z "$$vstr" ]; then \ + echo "Error: Unable to parse firtool version. Ensure '$(FIRTOOL_BIN) --version' prints 'CIRCT firtool-X.Y.Z'." >&2; exit 1; \ + fi; \ + maj=$${vstr%%.*}; rest=$${vstr#*.}; min=$${rest%%.*}; pat=$${rest#*.}; \ + if [ "$$maj" -lt 1 ] || { [ "$$maj" -eq 1 ] && [ "$$min" -lt 129 ]; }; then \ + echo "Error: USE_CHISEL7 requires firtool >= 1.129.0, found $$vstr. Please update CIRCT firtool." 
>&2; exit 1; \ + fi; \ + fi +endef + ######################################################################################### # specify user-interface variables ######################################################################################### @@ -68,7 +83,9 @@ HELP_COMMANDS += \ " run-tests = run all assembly and benchmark tests" \ " launch-sbt = start sbt terminal" \ " find-configs = list Chipyard Config classes (eligible CONFIG=)" \ -" find-config-fragments = list all config. fragments" +" find-config-fragments = list all config. fragments" \ +" run-firtool = run CIRCT firtool to emit Verilog/JSON/mem conf" \ +" run-uniquify = run uniquify-module-names on current elaboration outputs" ######################################################################################### # include additional subproject make fragments @@ -159,6 +176,9 @@ export mfc_extra_anno_contents export sfc_extra_low_transforms_anno_contents $(FINAL_ANNO_FILE) $(MFC_EXTRA_ANNO_FILE) &: $(ANNO_FILE) echo "$$mfc_extra_anno_contents" > $(MFC_EXTRA_ANNO_FILE) +ifdef USE_CHISEL7 + jq '. + [{"class":"firrtl.transforms.BlackBoxTargetDirAnno","targetDir":"$(GEN_COLLATERAL_DIR)/blackboxes"}]' $(MFC_EXTRA_ANNO_FILE) > $(MFC_EXTRA_ANNO_FILE).tmp && mv $(MFC_EXTRA_ANNO_FILE).tmp $(MFC_EXTRA_ANNO_FILE) +endif jq -s '[.[][]]' $(ANNO_FILE) $(MFC_EXTRA_ANNO_FILE) > $(FINAL_ANNO_FILE) .PHONY: firrtl @@ -179,6 +199,12 @@ SFC_MFC_TARGETS = \ MFC_BASE_LOWERING_OPTIONS ?= emittedLineLength=2048,noAlwaysComb,disallowLocalVariables,verifLabels,disallowPortDeclSharing,locationInfoStyle=wrapInAtSquareBracket +# Extra firtool flags are only applied when building with Chisel 7 +FIRTOOL_EXTRA_FLAGS ?= +ifdef USE_CHISEL7 +FIRTOOL_EXTRA_FLAGS += --verification-flavor=if-else-fatal --disable-layers=Verification.Assume,Verification.Cover +endif + # DOC include start: FirrtlCompiler $(MFC_LOWERING_OPTIONS): mkdir -p $(dir $@) @@ -190,6 +216,7 @@ endif $(SFC_MFC_TARGETS) &: $(FIRRTL_FILE) $(FINAL_ANNO_FILE) $(MFC_LOWERING_OPTIONS) $(call require_cmd,$(FIRTOOL_BIN)) + $(require_firtool_version) rm -rf $(GEN_COLLATERAL_DIR) (set -o pipefail && $(FIRTOOL_BIN) \ --format=fir \ @@ -204,12 +231,34 @@ $(SFC_MFC_TARGETS) &: $(FIRRTL_FILE) $(FINAL_ANNO_FILE) $(MFC_LOWERING_OPTIONS) --repl-seq-mem-file=$(MFC_SMEMS_CONF) \ --annotation-file=$(FINAL_ANNO_FILE) \ --split-verilog \ + $(FIRTOOL_EXTRA_FLAGS) \ -o $(GEN_COLLATERAL_DIR) \ $(FIRRTL_FILE) |& tee $(FIRTOOL_LOG_FILE)) $(SED) $(SED_INPLACE) 's/.*/& /' $(MFC_SMEMS_CONF) # need trailing space for SFC macrocompiler - touch $(MFC_BB_MODS_FILELIST) # if there are no BB's then the file might not be generated, instead always generate it +ifdef USE_CHISEL7 + # Construct blackbox file list from files emitted into gen-collateral/blackboxes + @if [ -d "$(GEN_COLLATERAL_DIR)/blackboxes" ]; then \ + find "$(GEN_COLLATERAL_DIR)/blackboxes" -type f \( -name '*.v' -o -name '*.sv' -o -name '*.cc' \) | \ + sed -e 's;^$(GEN_COLLATERAL_DIR)/;;' > "$(MFC_BB_MODS_FILELIST)"; \ + else \ + : > "$(MFC_BB_MODS_FILELIST)"; \ + fi +else + # If there are no BB's then the file might not be generated; ensure it exists + touch $(MFC_BB_MODS_FILELIST) +endif # DOC include end: FirrtlCompiler +.PHONY: run-firtool +run-firtool: $(SFC_MFC_TARGETS) + @echo "[run-firtool] Generated: $(SFC_MFC_TARGETS)" + +# Convenience alias to re-run the uniquify step (module/filelist splitting) +.PHONY: run-uniquify +run-uniquify: $(TOP_MODS_FILELIST) $(MODEL_MODS_FILELIST) $(ALL_MODS_FILELIST) $(BB_MODS_FILELIST) $(MFC_MODEL_HRCHY_JSON_UNIQUIFIED) 
+ @echo "[run-uniquify] Updated filelists under $(GEN_COLLATERAL_DIR)" + + $(TOP_MODS_FILELIST) $(MODEL_MODS_FILELIST) $(ALL_MODS_FILELIST) $(BB_MODS_FILELIST) $(MFC_MODEL_HRCHY_JSON_UNIQUIFIED) &: $(MFC_MODEL_HRCHY_JSON) $(MFC_TOP_HRCHY_JSON) $(MFC_FILELIST) $(MFC_BB_MODS_FILELIST) $(base_dir)/scripts/uniquify-module-names.py \ --model-hier-json $(MFC_MODEL_HRCHY_JSON) \ diff --git a/generators/rocket-chip b/generators/rocket-chip index 6342f0f3df..8f1e33b253 160000 --- a/generators/rocket-chip +++ b/generators/rocket-chip @@ -1 +1 @@ -Subproject commit 6342f0f3dff235b222c6a071ecd6eb746677d880 +Subproject commit 8f1e33b253e3bce741861c0a2e3ba8b7ff85b292 diff --git a/scripts/uniquify-module-names.py b/scripts/uniquify-module-names.py index 5c6a9613fb..ba651560a3 100755 --- a/scripts/uniquify-module-names.py +++ b/scripts/uniquify-module-names.py @@ -1,13 +1,39 @@ #!/usr/bin/env python3 +""" +Uniquify module names and split filelists for DUT vs Model. + +This script post-processes Verilog emitted by firtool to: + - Identify modules that appear in both the DUT and the Model hierarchies + - Create uniquified copies of shared module sources for the Model tree + - Rewrite parent instantiations in the Model tree to reference uniquified names + - Write separate filelists for the DUT and Model, plus an updated Model hierarchy JSON + +Inputs (CLI): + - --model-hier-json / --top-hier-json: firtool hierarchy JSONs + - --in-all-filelist / --in-bb-filelist: combined and blackbox filelists + - --dut / --model: root module names for the DUT and Model trees + - --target-dir: base directory holding generated collateral + - --gcpath: path to gen-collateral (for resolving relative paths) + +Outputs: + - --out-dut-filelist: filelist containing all Verilog/sidecars used by the DUT + - --out-model-filelist: filelist for the Model tree (with uniquified modules as needed) + - --out-model-hier-json: updated Model hierarchy JSON reflecting uniquified names + +Failure policy: fail fast with clear, actionable error messages. +""" import json import argparse import shutil import os import sys +import platform +import re +from typing import List, Dict, Set, Iterable, Tuple, Any, Optional, Match -parser = argparse.ArgumentParser(description="") +parser = argparse.ArgumentParser(description="Uniquify shared modules and split DUT/Model filelists") parser.add_argument("--model-hier-json", type=str, required=True, help="Path to hierarchy JSON emitted by firtool. Must include DUT as a module.") parser.add_argument("--top-hier-json", type=str, required=True, help="Path to hierarchy JSON emitted by firtool. Must include DUT as a module.") parser.add_argument('--in-all-filelist', type=str, required=True, help='Path to input filelist that has all modules (relative paths).') @@ -22,18 +48,95 @@ args = parser.parse_args() MODEL_SFX=args.model + "_UNIQUIFIED" -SED=os.environ.get("SED", "sed") - -def bash(cmd): +def die(msg: str) -> None: + """Print a fatal error message and terminate. + + Args: + msg: Description of the error condition. + """ + print(f"[uniquify-module-names] ERROR: {msg}", file=sys.stderr) + sys.exit(1) + +def replace_module_decl(path: str, old: str, new: str) -> None: + """Rename a SystemVerilog module declaration identifier in-place. + + Matches a declaration line (optionally with attributes) and replaces the + module name token 'old' with 'new'. Only the first occurrence is updated. + + Args: + path: Absolute path to the SV file to modify. + old: Existing module identifier to replace. 
+ new: New module identifier to write. + """ + with open(path, 'r', encoding='utf-8') as f: + src = f.read() + # Match the start of a module declaration and replace only the module identifier + pat = re.compile(rf'^(?P<prefix>\s*(?:\(\*.*?\*\)\s*)*module\s+)(?P<name>{re.escape(old)})\b', + flags=re.MULTILINE | re.DOTALL) + new_src, n = pat.subn(rf'\g<prefix>{new}', src, count=1) + if n == 0: + # Fallback without attributes + pat2 = re.compile(rf'^(?P<prefix>\s*module\s+)(?P<name>{re.escape(old)})\b', flags=re.MULTILINE) + new_src, _ = pat2.subn(rf'\g<prefix>{new}', src, count=1) + if new_src != src: + with open(path, 'w', encoding='utf-8') as f: + f.write(new_src) + + +def replace_module_instantiation(path: str, old: str, new: str) -> None: + """Rename a module identifier at the start of an instantiation header. + + This updates lines of the form: + [attrs] <old> [#(...)] <inst> ( + to instead start with <new> while preserving attributes, parameters, + instance name, and following punctuation. + + Args: + path: Absolute path to the SV file to modify. + old: Module identifier to search for at instantiation header. + new: Replacement module identifier. + """ + with open(path, 'r', encoding='utf-8') as f: + src = f.read() + # Match an instantiation header: optional attributes, module id, optional params, instance name, followed by '(' or '[' + pattxt = rf'''^ + (?P<prefix>\s*(?:\(\*.*?\*\)\s*)*) # optional attributes + (?P<mod>{re.escape(old)})\b # module id token + (?P<params>\s*\#\s*\([^;]*?\))? # optional parameterization + (?P<spaces>\s+) # whitespace before instance name + (?P<inst>[A-Za-z_][\w$]*)\s* # instance name + (?=\(|\[) # followed by ( or [ (ports or array) + ''' + pat = re.compile(pattxt, flags=re.MULTILINE | re.DOTALL | re.VERBOSE) + + def _repl(m: re.Match) -> str: + return f"{m.group('prefix')}{new}{m.group('params') or ''}{m.group('spaces')}{m.group('inst')}" + + new_src, _ = pat.subn(_repl, src) + if new_src != src: + with open(path, 'w', encoding='utf-8') as f: + f.write(new_src) + + +def bash(cmd: str) -> None: + """Execute a shell command, exiting on failure (debug helper).""" fail = os.system(cmd) if fail: - print(f'[*] failed to execute {cmd}') - sys.exit(1) + die(f"failed to execute shell command: {cmd}") else: print(cmd) -def bfs_collect_modules(tree, child_to_ignore = None): +def bfs_collect_modules(tree: Dict[str, Any], child_to_ignore: Optional[str] = None) -> List[str]: + """Breadth-first traversal collecting module names from a hierarchy tree. + + Args: + tree: Parsed JSON object with keys 'instance_name', 'module_name', 'instances'. + child_to_ignore: Optional module name to skip descending into. + + Returns: + A list of module names in BFS order (may include duplicates). + """ q = [(tree['instance_name'], tree['module_name'], tree['instances'])] modules = list() @@ -48,73 +151,284 @@ def bfs_collect_modules(tree, child_to_ignore = None): q.append((c['instance_name'], c['module_name'], c['instances'])) return modules -def get_modules_in_verilog_file(file): - module_names = list() - with open(file) as f: - lines = f.readlines() - for line in lines: - words = line.split() - if len(words) > 0 and words[0] == "module": - module_names.append(words[1].replace("(", "").replace(")", "").replace(";", "")) +def get_modules_in_verilog_file(file: str) -> List[str]: + """Extract declared module names from a SystemVerilog file. + + Supports optional attributes ("(* ... *)") and lifetime (automatic/static). + + Args: + file: Absolute path to the .sv/.v file. + + Returns: + A list of declared module identifiers in the file.
+ """ + module_names: List[str] = [] + if not os.path.exists(file): + die(f"Verilog source not found while scanning filelists: {file}") + try: + with open(file, encoding='utf-8', errors='ignore') as f: + src = f.read() + except Exception as e: + die(f"Failed to read Verilog source '{file}': {e}") + # Match SV module declarations with optional attributes and optional lifetime + # Examples: + # module Foo ( + # (* keep_hierarchy *) module automatic Bar #( + mod_decl = re.compile(r'^\s*(?:\(\*.*?\*\)\s*)*module\s+(?:automatic\s+|static\s+)?(?P[A-Za-z_][\w$]*)\b', + flags=re.MULTILINE | re.DOTALL) + for m in mod_decl.finditer(src): + module_names.append(m.group('name')) return module_names -def get_modules_in_filelist(filelist, verilog_module_filename, cc_filelist): - with open(filelist) as fl: - lines = fl.readlines() - for line in lines: - path = line.strip() - basepath = os.path.basename(path) - ext = basepath.split(".")[-1] - - if (ext == "v") or (ext == "sv"): - modules = get_modules_in_verilog_file(os.path.join(args.gcpath, basepath)) - for module in modules: - verilog_module_filename[module] = basepath +scanned_sv_files: List[str] = [] +support_sv_files: List[str] = [] # .sv/.v files without any module declarations (packages, binds, etc.) +all_sv_files: List[str] = [] # all .sv/.v paths from filelist (relative or absolute) + +def _normalize_path_token(tok: str) -> str: + """Normalize a token from a filelist line into a path. + + - Strips quotes + - Converts absolute paths under target-dir to gcpath-relative + + Args: + tok: Raw token string from a filelist. + + Returns: + Normalized path token. + """ + tok = tok.strip() + # Strip surrounding quotes if present + if (tok.startswith('"') and tok.endswith('"')) or (tok.startswith("'") and tok.endswith("'")): + tok = tok[1:-1] + # Normalize absolute paths under target-dir to gcpath-relative + if os.path.isabs(tok) and os.path.abspath(tok).startswith(os.path.abspath(args.target_dir) + os.sep): + rel = os.path.relpath(os.path.abspath(tok), os.path.abspath(args.target_dir)) + return rel + return tok + +def _iter_filelist_entries(root_filelist: str) -> Iterable[str]: + """Yield all file entries recursively from a root filelist. + + Supports -f/-F and @file includes, comment stripping, and basic flag skipping. + Paths are normalized via _normalize_path_token. 
+ """ + visited: Set[str] = set() + stack: List[str] = [root_filelist] + while stack: + fl = stack.pop() + fkey = os.path.abspath(fl) + if fkey in visited: + continue + visited.add(fkey) + if not os.path.exists(fl): + # Try relative to gcpath + alt = os.path.join(args.gcpath, fl) + if os.path.exists(alt): + fl = alt else: - cc_filelist.append(basepath) + die(f"Included filelist not found: {fl}") + try: + with open(fl, encoding='utf-8', errors='ignore') as f: + for raw_line in f: + # Drop comments after // and # + line = raw_line.split('//',1)[0].split('#',1)[0].strip() + if not line: + continue + tokens = line.split() + i = 0 + while i < len(tokens): + tok = tokens[i] + if tok in ('-f','-F'): + if i+1 >= len(tokens): + die(f"Malformed filelist include line in {fl}: '{raw_line.strip()}'") + inc = _normalize_path_token(tokens[i+1]) + stack.append(inc) + i += 2 + continue + # Explicit single-file include flags (e.g., '-v ') + if tok == '-v': + if i+1 >= len(tokens): + die(f"Malformed -v entry in {fl}: '{raw_line.strip()}'") + path_tok = _normalize_path_token(tokens[i+1]) + yield path_tok + i += 2 + continue + # Some flows use '-sv '; treat next token as a file iff it looks like one + if tok == '-sv' and i+1 < len(tokens): + nxt = _normalize_path_token(tokens[i+1]) + if nxt.lower().endswith(('.sv', '.v')): + yield nxt + i += 2 + continue + if tok.startswith('@') and len(tok) > 1: + inc = _normalize_path_token(tok[1:]) + stack.append(inc) + i += 1 + continue + # Skip flags we don't consume + if tok.startswith(('+incdir+','-I','-y','-timescale','+define+')) or tok in ('-sv',): + i += 1 + continue + yield _normalize_path_token(tok) + i += 1 + except Exception as e: + die(f"Failed to read filelist '{fl}': {e}") + +def get_modules_in_filelist(filelist: str, + verilog_module_filename: Dict[str, str], + cc_filelist: List[str]) -> Tuple[Dict[str, str], List[str]]: + """Populate a module→file mapping by scanning a (possibly nested) filelist. + + Args: + filelist: Path to the root filelist. + verilog_module_filename: Mapping to update with module name → source path. + cc_filelist: List to append non-Verilog sidecar files to. + + Returns: + Tuple of (verilog_module_filename, cc_filelist). + """ + if not os.path.exists(filelist): + # Try relative to gcpath + alt = os.path.join(args.gcpath, filelist) + if not os.path.exists(alt): + die(f"Input filelist not found: {filelist}") + filelist = alt + for path in _iter_filelist_entries(filelist): + ext = os.path.basename(path).split('.')[-1] + if ext in ("v","sv"): + abs_for_read = path if os.path.isabs(path) else os.path.join(args.gcpath, path) + scanned_sv_files.append(abs_for_read) + all_sv_files.append(path) + modules = get_modules_in_verilog_file(abs_for_read) + for module in modules: + verilog_module_filename[module] = path + if not modules: + # Track Verilog/SystemVerilog files without module declarations (packages, binds, etc.) 
+ support_sv_files.append(path) + else: + cc_filelist.append(path) return (verilog_module_filename, cc_filelist) -def get_modules_under_hier(hier, child_to_ignore=None): - with open(hier) as hj: - hj_data = json.load(hj) +def get_modules_under_hier(hier: str, child_to_ignore: Optional[str] = None) -> Set[str]: + """Load a hierarchy JSON from disk and collect module names in it.""" + if not os.path.exists(hier): + die(f"Hierarchy JSON not found: {hier}") + try: + with open(hier, encoding='utf-8') as hj: + hj_data = json.load(hj) + except Exception as e: + die(f"Failed to parse hierarchy JSON '{hier}': {e}") + try: modules_under_hier = set(bfs_collect_modules(hj_data, child_to_ignore=child_to_ignore)) + except Exception as e: + die(f"Failed to traverse hierarchy JSON '{hier}': {e}") return modules_under_hier -def write_verilog_filelist(modules, verilog_module_filename, out_filelist): - written_files = set() +def get_modules_under_hier_obj(hj_data: Dict[str, Any], + child_to_ignore: Optional[str] = None) -> Set[str]: + """Collect module names from an in-memory hierarchy JSON object.""" + try: + return set(bfs_collect_modules(hj_data, child_to_ignore=child_to_ignore)) + except Exception as e: + die(f"Failed to traverse in-memory hierarchy JSON: {e}") + +def write_verilog_filelist(modules: Iterable[str], + verilog_module_filename: Dict[str, str], + out_filelist: str) -> Set[str]: + """Write a filelist containing Verilog sources for the given module set. + + Returns the set of unique file paths written. + """ + written_files: Set[str] = set() existing_modules = verilog_module_filename.keys() with open(out_filelist, "w") as df: for module in modules: if module in existing_modules: - verilog_filename = verilog_module_filename[module] + verilog_filename = verilog_module_filename[module] # relative to gcpath if verilog_filename not in written_files: written_files.add(verilog_filename) - if args.target_dir in verilog_filename: + # Always prefix with target_dir unless the path is already absolute + if os.path.isabs(verilog_filename): df.write(f"{verilog_filename}\n") else: df.write(f"{args.target_dir}/{verilog_filename}\n") return written_files -def write_cc_filelist(filelist, out_filelist): +def write_cc_filelist(filelist: Iterable[str], out_filelist: str) -> None: + """Append non-Verilog sidecar paths to an existing filelist file.""" with open(out_filelist, "a") as df: for path in filelist: - file = os.path.basename(path) - df.write(f"{args.target_dir}/{file}\n") - -def generate_copy(c, sfx): - (cur_name, ext) = os.path.splitext(c) - new_name = cur_name + "_" + sfx - new_file = new_name + ext - - cur_file = os.path.join(args.gcpath, c) - new_file = os.path.join(args.gcpath, new_file) + # Preserve relative layout for non-Verilog files as well + if os.path.isabs(path): + df.write(f"{path}\n") + else: + df.write(f"{args.target_dir}/{path}\n") - shutil.copy(cur_file, new_file) - bash(rf"{SED} -i 's/module\( \+\){cur_name}/module\1{new_name}/' {new_file}") - return new_file +def write_support_sv_files(files: Iterable[str], out_filelist: str) -> None: + """Append SV/V files that contain no module declarations (e.g., packages, binds).""" + with open(out_filelist, "a") as df: + for path in files: + if os.path.isabs(path): + df.write(f"{path}\n") + else: + df.write(f"{args.target_dir}/{path}\n") -def bfs_uniquify_modules(tree, common_fnames, verilog_module_filename): +def write_additional_sv_files(all_files: Iterable[str], already_written: Set[str], out_filelist: str) -> None: + """Append any SV/V files from 
the input filelist that were not already emitted (e.g., verification/* helpers).""" + with open(out_filelist, "a") as df: + for path in all_files: + # Normalize compare key to the same style as written (stored without target_dir prefix) + rel = path if not os.path.isabs(path) else os.path.relpath(path, args.target_dir) + # The 'already_written' set tracks gen-collateral-relative paths; strip any leading './' + key = rel.lstrip('./') + if key in already_written: + continue + if os.path.isabs(path): + df.write(f"{path}\n") + else: + df.write(f"{args.target_dir}/{path}\n") + +def generate_copy(rel_path: str, sfx: str) -> str: + """Duplicate a Verilog file under gcpath with a suffixed module name. + + The new file is placed alongside the original (preserving directories) and + its module declaration is renamed to include '_<sfx>'. + + Args: + rel_path: Source file path (relative to gcpath or absolute). + sfx: Suffix to append to the module identifier and file base name. + + Returns: + New relative path (from gcpath) of the copied file. + """ + # rel_path may be relative to args.gcpath or absolute + dirname = os.path.dirname(rel_path) + basename = os.path.basename(rel_path) + (base_no_ext, ext) = os.path.splitext(basename) + + # New module/file name with suffix, preserving directory structure + new_basename = base_no_ext + "_" + sfx + new_rel_path = os.path.join(dirname, new_basename + ext) if dirname else (new_basename + ext) + + # Resolve absolute paths + src_abs = rel_path if os.path.isabs(rel_path) else os.path.join(args.gcpath, rel_path) + dst_abs = os.path.join(args.gcpath, new_rel_path) + + if not os.path.exists(src_abs): + raise FileNotFoundError(f"source not found: {src_abs}") + + os.makedirs(os.path.dirname(dst_abs) or args.gcpath, exist_ok=True) + shutil.copy(src_abs, dst_abs) + # Update module declaration inside the copied file (Python-based, no sed) + replace_module_decl(dst_abs, base_no_ext, new_basename) + # Return the new file path relative to gcpath + return new_rel_path + +def bfs_uniquify_modules(tree: Dict[str, Any], + common_fnames: Set[str], + verilog_module_filename: Dict[str, str]) -> None: + """Breadth-first pass to copy shared modules and update parent instantiations.""" q = [(tree['instance_name'], tree['module_name'], tree['instances'], None)] updated_submodule = set() existing_modules = verilog_module_filename.keys() @@ -124,9 +438,29 @@ def bfs_uniquify_modules(tree, common_fnames, verilog_module_filename): q.pop(0) (inst, mod, child, parent) = front - # external module + # external or unmapped module from filelists if mod not in existing_modules: - assert(len(child) == 0) + if len(child) != 0: + # Heuristic: find a scanned SV file whose basename matches the module name + matched_path = None + for fpath in scanned_sv_files: + base = os.path.splitext(os.path.basename(fpath))[0] + if base == mod: + matched_path = fpath + break + if matched_path is not None: + # Add a mapping and proceed (relative to gcpath if applicable) + rel = os.path.relpath(matched_path, os.path.abspath(args.gcpath)) + verilog_module_filename[mod] = rel if not rel.startswith('..') else matched_path + cur_file = verilog_module_filename[mod] + else: + die( + "Hierarchy references a module not found in filelists with children: " + f"module='{mod}', parent='{parent}', children={len(child)}. " + "Ensure this module's Verilog is included in filelist.f or blackbox filelist, " + "or that it is a leaf (e.g., a pure blackbox)."
+ ) + # Leaf external is acceptable; nothing to copy/rename continue cur_file = verilog_module_filename[mod] @@ -137,22 +471,27 @@ def bfs_uniquify_modules(tree, common_fnames, verilog_module_filename): try: new_file = generate_copy(cur_file, MODEL_SFX) if parent is not None and ((parent, mod) not in updated_submodule): - parent_file = os.path.join(args.gcpath, verilog_module_filename[parent]) - bash(rf"{SED} -i 's/\( \*\){mod}\( \+\)/\1{mod}_{MODEL_SFX}\2/' {parent_file}") + parent_path = verilog_module_filename[parent] + parent_file = parent_path if os.path.isabs(parent_path) else os.path.join(args.gcpath, parent_path) + # Update the parent instantiation to reference the uniquified module name + replace_module_instantiation(parent_file, mod, f"{mod}_{MODEL_SFX}") updated_submodule.add((parent, mod)) # add the uniquified module to the verilog_modul_filename dict new_mod = mod + "_" + MODEL_SFX verilog_module_filename[new_mod] = new_file - except: - print(f"No corresponding file for {cur_file}") + except Exception as e: + die(f"Failed to uniquify module '{mod}' from source '{cur_file}': {e}") # traverse its children for c in child: if c['module_name'] != args.dut: q.append((c['instance_name'], c['module_name'], c['instances'], new_mod)) -def dfs_update_modules(tree, common_fnames, visited): +def dfs_update_modules(tree: Dict[str, Any], + common_fnames: Set[str], + visited: Set[str]) -> bool: + """Depth-first pass to rewrite child module names in the hierarchy JSON.""" # List of direct submodules to update childs_to_update = list() for child in tree['instances']: @@ -174,17 +513,33 @@ def dfs_update_modules(tree, common_fnames, visited): visited.add(cur_module) return (new_file is not None) -def uniquify_modules_under_model(modules_under_model, common_modules, verilog_module_filename): - with open(args.model_hier_json) as imhj: - imhj_data = json.load(imhj) - visited = set() - bfs_uniquify_modules(imhj_data, common_modules, verilog_module_filename) - dfs_update_modules (imhj_data, common_modules, visited) - - with open(args.out_model_hier_json, "w+") as out_file: +def uniquify_modules_under_model(modules_under_model: Set[str], + common_modules: Set[str], + verilog_module_filename: Dict[str, str]) -> Set[str]: + """Uniquify common modules under the model tree and return updated module set.""" + try: + with open(args.model_hier_json, encoding='utf-8') as imhj: + imhj_data = json.load(imhj) + except Exception as e: + die(f"Failed to parse model hierarchy JSON '{args.model_hier_json}': {e}") + + visited = set() + bfs_uniquify_modules(imhj_data, common_modules, verilog_module_filename) + dfs_update_modules (imhj_data, common_modules, visited) + + try: + out_dir = os.path.dirname(args.out_model_hier_json) + if out_dir: + os.makedirs(out_dir, exist_ok=True) + with open(args.out_model_hier_json, "w+", encoding='utf-8') as out_file: json.dump(imhj_data, out_file, indent=2) + except Exception as e: + die(f"Failed to write updated model hierarchy JSON '{args.out_model_hier_json}': {e}") + + return get_modules_under_hier_obj(imhj_data, args.dut) -def main(): +def main() -> None: + """Program entry: parse inputs, uniquify, and emit outputs.""" verilog_module_filename = dict() cc_filelist = list() get_modules_in_filelist(args.in_all_filelist, verilog_module_filename, cc_filelist) @@ -195,14 +550,17 @@ def main(): common_modules = modules_under_top.intersection(modules_under_model) # write top filelist - write_verilog_filelist(modules_under_top, verilog_module_filename, args.out_dut_filelist) + 
written_top = write_verilog_filelist(modules_under_top, verilog_module_filename, args.out_dut_filelist) + # Only include DUT hierarchy-driven Verilog in top filelist; do not append + # verification/* helpers or model-only sources to avoid pulling TestHarness, etc. - # rename modules that are common - uniquify_modules_under_model(modules_under_model, common_modules, verilog_module_filename) - uniquified_modules_under_model = get_modules_under_hier(args.out_model_hier_json, args.dut) + # rename modules that are common and compute updated model hierarchy + uniquified_modules_under_model = uniquify_modules_under_model(modules_under_model, common_modules, verilog_module_filename) # write model filelist - write_verilog_filelist(uniquified_modules_under_model, verilog_module_filename, args.out_model_filelist) + written_model = write_verilog_filelist(uniquified_modules_under_model, verilog_module_filename, args.out_model_filelist) + write_support_sv_files(support_sv_files, args.out_model_filelist) + write_additional_sv_files(all_sv_files, written_model, args.out_model_filelist) write_cc_filelist (cc_filelist, args.out_model_filelist) diff --git a/tools/stage-chisel7/src/main/scala/ChipyardAnnotations.scala b/tools/stage-chisel7/src/main/scala/ChipyardAnnotations.scala new file mode 100644 index 0000000000..63ac7f2788 --- /dev/null +++ b/tools/stage-chisel7/src/main/scala/ChipyardAnnotations.scala @@ -0,0 +1,68 @@ +// See LICENSE for license details. +// Based on Rocket Chip's stage implementation + +package chipyard.stage + +import chisel3.experimental.BaseModule +import firrtl.annotations.{Annotation, NoTargetAnnotation} +import firrtl.options.{HasShellOptions, ShellOption, Unserializable} + +trait ChipyardOption extends Unserializable { this: Annotation => } + +/** This hijacks the existing ConfigAnnotation to accept the legacy _-delimited format */ +private[stage] object UnderscoreDelimitedConfigsAnnotation extends HasShellOptions { + override val options = Seq( + new ShellOption[String]( + longOption = "legacy-configs", + toAnnotationSeq = a => { + val split = a.split(':') + assert(split.length == 2, s"'${a}' split by ':' doesn't yield two things") + val packageName = split.head + val configs = split.last.split("_") + Seq(new ConfigsAnnotation(configs map { config => if (config contains ".") s"${config}" else s"${packageName}.${config}" } )) + }, + helpText = "A string of underscore-delimited configs (configs have decreasing precendence from left to right).", + shortOption = Some("LC") + ) + ) +} + +/** Paths to config classes */ +case class ConfigsAnnotation(configNames: Seq[String]) extends NoTargetAnnotation with ChipyardOption +private[stage] object ConfigsAnnotation extends HasShellOptions { + override val options = Seq( + new ShellOption[Seq[String]]( + longOption = "configs", + toAnnotationSeq = a => Seq(ConfigsAnnotation(a)), + helpText = "", + shortOption = Some("C") + ) + ) +} + +case class TopModuleAnnotation(clazz: Class[_ <: Any]) extends NoTargetAnnotation with ChipyardOption +private[stage] object TopModuleAnnotation extends HasShellOptions { + override val options = Seq( + new ShellOption[String]( + longOption = "top-module", + toAnnotationSeq = a => Seq(TopModuleAnnotation(Class.forName(a).asInstanceOf[Class[_ <: BaseModule]])), + helpText = "", + shortOption = Some("T") + ) + ) +} + +/** Optional base name for generated files' filenames */ +case class OutputBaseNameAnnotation(outputBaseName: String) extends NoTargetAnnotation with ChipyardOption +private[stage] object 
OutputBaseNameAnnotation extends HasShellOptions { + override val options = Seq( + new ShellOption[String]( + longOption = "name", + toAnnotationSeq = a => Seq(OutputBaseNameAnnotation(a)), + helpText = "", + shortOption = Some("n") + ) + ) +} + + diff --git a/tools/stage-chisel7/src/main/scala/ChipyardCli.scala b/tools/stage-chisel7/src/main/scala/ChipyardCli.scala new file mode 100644 index 0000000000..d6071ff35f --- /dev/null +++ b/tools/stage-chisel7/src/main/scala/ChipyardCli.scala @@ -0,0 +1,17 @@ +// See LICENSE for license details. +// Based on Rocket Chip's stage implementation + +package chipyard.stage + +import firrtl.options.Shell + +trait ChipyardCli { this: Shell => + + parser.note("Chipyard Generator Options") + Seq( + TopModuleAnnotation, + ConfigsAnnotation, + OutputBaseNameAnnotation, + UnderscoreDelimitedConfigsAnnotation + ).foreach(_.addOptions(parser)) +} diff --git a/tools/stage-chisel7/src/main/scala/ChipyardOptions.scala b/tools/stage-chisel7/src/main/scala/ChipyardOptions.scala new file mode 100644 index 0000000000..f1d0ac08ec --- /dev/null +++ b/tools/stage-chisel7/src/main/scala/ChipyardOptions.scala @@ -0,0 +1,40 @@ +// See LICENSE + +package chipyard.stage + +class ChipyardOptions private[stage] ( + val topModule: Option[Class[_ <: Any]] = None, + val configNames: Option[Seq[String]] = None, + val outputBaseName: Option[String] = None) { + + private[stage] def copy( + topModule: Option[Class[_ <: Any]] = topModule, + configNames: Option[Seq[String]] = configNames, + outputBaseName: Option[String] = outputBaseName, + ): ChipyardOptions = { + + new ChipyardOptions( + topModule=topModule, + configNames=configNames, + outputBaseName=outputBaseName, + ) + } + + lazy val topPackage: Option[String] = topModule match { + case Some(a) => Some(a.getPackage.getName) + case _ => None + } + + lazy val configClass: Option[String] = configNames match { + case Some(names) => + val classNames = names.map{ n => n.split('.').last } + Some(classNames.mkString("_")) + case _ => None + } + + lazy val longName: Option[String] = outputBaseName match { + case Some(name) => Some(name) + case _ => + if (!topPackage.isEmpty && !configClass.isEmpty) Some(s"${topPackage.get}.${configClass.get}") else None + } +} diff --git a/tools/stage-chisel7/src/main/scala/ChipyardStage.scala b/tools/stage-chisel7/src/main/scala/ChipyardStage.scala new file mode 100644 index 0000000000..bda626baa4 --- /dev/null +++ b/tools/stage-chisel7/src/main/scala/ChipyardStage.scala @@ -0,0 +1,60 @@ +// See LICENSE for license details. 
+// Based on Rocket Chip's stage implementation + +package chipyard.stage + +import circt.stage.{ChiselStage, CIRCTTargetAnnotation, CIRCTTarget} +import firrtl.options.{Shell} +import firrtl.options.Viewer.view +import firrtl.{AnnotationSeq} +import firrtl.options.{Phase, PhaseManager, Shell, Dependency} + +final class ChipyardChiselStage extends ChiselStage { + override def run(annotations: AnnotationSeq): AnnotationSeq = { + val pm = new PhaseManager( + targets = Seq( + Dependency[chisel3.stage.phases.Checks], + Dependency[chisel3.stage.phases.AddImplicitOutputFile], + Dependency[chisel3.stage.phases.AddImplicitOutputAnnotationFile], + Dependency[chisel3.stage.phases.AddSerializationAnnotations], + Dependency[chisel3.stage.phases.Convert], + Dependency[chisel3.stage.phases.AddDedupGroupAnnotations], + Dependency[circt.stage.phases.AddImplicitOutputFile], + Dependency[circt.stage.phases.Checks], + Dependency[circt.stage.phases.CIRCT] + ), + currentState = Seq( + Dependency[firrtl.stage.phases.AddDefaults], + Dependency[firrtl.stage.phases.Checks] + ) + ) + pm.transform(annotations :+ CIRCTTargetAnnotation(CIRCTTarget.CHIRRTL)) + } +} + +class ChipyardStage extends ChiselStage { + override val shell = new Shell("chipyard") with ChipyardCli with circt.stage.CLI { + // These are added by firrtl.options.Shell (which we must extend because we are a Stage) + override protected def includeLoggerOptions = false + } + override def run(annotations: AnnotationSeq): AnnotationSeq = { + val pm = new PhaseManager( + targets = Seq( + Dependency[chipyard.stage.phases.Checks], + Dependency[chipyard.stage.phases.TransformAnnotations], + Dependency[chipyard.stage.phases.PreElaboration], + Dependency[ChipyardChiselStage], + Dependency[chipyard.stage.phases.GenerateFirrtlAnnos], + Dependency[chipyard.stage.phases.AddDefaultTests], + Dependency[chipyard.stage.phases.GenerateTestSuiteMakefrags], + Dependency[chipyard.stage.phases.GenerateArtefacts], + ), + currentState = Seq( + Dependency[firrtl.stage.phases.AddDefaults], + Dependency[firrtl.stage.phases.Checks] + ) + ) + pm.transform(annotations) + } + override final def invalidates(a: Phase): Boolean = false +} diff --git a/tools/stage-chisel7/src/main/scala/StageUtils.scala b/tools/stage-chisel7/src/main/scala/StageUtils.scala new file mode 100644 index 0000000000..5654c20ad3 --- /dev/null +++ b/tools/stage-chisel7/src/main/scala/StageUtils.scala @@ -0,0 +1,32 @@ +// See LICENSE + +package chipyard.stage + +import java.io.{File, FileWriter} + +import org.chipsalliance.cde.config.{Config, Parameters} +import freechips.rocketchip.util.{BlackBoxedROM, ROMGenerator} + +trait HasChipyardStageUtils { + + def getConfig(fullConfigClassNames: Seq[String]): Config = { + new Config(fullConfigClassNames.foldRight(Parameters.empty) { case (currentName, config) => + val currentConfig = try { + Class.forName(currentName).newInstance.asInstanceOf[Config] + } catch { + case e: java.lang.ClassNotFoundException => + throw new Exception(s"""Unable to find part "$currentName" from "$fullConfigClassNames", did you misspell it or specify the wrong package path?""", e) + } + currentConfig ++ config + }) + } + + def writeOutputFile(targetDir: String, fname: String, contents: String): File = { + val f = new File(targetDir, fname) + val fw = new FileWriter(f) + fw.write(contents) + fw.close + f + } + +} diff --git a/tools/stage-chisel7/src/main/scala/package.scala b/tools/stage-chisel7/src/main/scala/package.scala new file mode 100644 index 0000000000..5dc89f9cb1 --- /dev/null +++ 
b/tools/stage-chisel7/src/main/scala/package.scala @@ -0,0 +1,24 @@ +// See LICENSE + +package chipyard + +import firrtl.AnnotationSeq +import firrtl.options.OptionsView + +package object stage { + + implicit object ChipyardOptionsView extends OptionsView[ChipyardOptions] { + + def view(annotations: AnnotationSeq): ChipyardOptions = annotations + .collect { case a: ChipyardOption => a } + .foldLeft(new ChipyardOptions()){ (c, x) => + x match { + case TopModuleAnnotation(a) => c.copy(topModule = Some(a)) + case ConfigsAnnotation(a) => c.copy(configNames = Some(a)) + case OutputBaseNameAnnotation(a) => c.copy(outputBaseName = Some(a)) + } + } + + } + +} diff --git a/tools/stage-chisel7/src/main/scala/phases/AddDefaultTests.scala b/tools/stage-chisel7/src/main/scala/phases/AddDefaultTests.scala new file mode 100644 index 0000000000..758131ad05 --- /dev/null +++ b/tools/stage-chisel7/src/main/scala/phases/AddDefaultTests.scala @@ -0,0 +1,52 @@ +// See LICENSE for license details. +// Based on Rocket Chip's stage implementation + +package chipyard.stage.phases + +import scala.util.Try +import scala.collection.mutable + +import org.chipsalliance.cde.config.Parameters +import chisel3.stage.phases.Elaborate +import firrtl.AnnotationSeq +import firrtl.annotations.{Annotation, NoTargetAnnotation} +import firrtl.options._ +import firrtl.options.Viewer._ +import freechips.rocketchip.system.{RocketTestSuite, TestGeneration} +import freechips.rocketchip.subsystem.{TilesLocated, InSubsystem, MaxXLen} + +import chipyard.TestSuiteHelper +import chipyard.TestSuitesKey +import chipyard.stage._ + +/** Annotation that contains a list of [[RocketTestSuite]]s to run */ +case class ChipyardTestSuiteAnnotation(tests: Seq[RocketTestSuite]) extends NoTargetAnnotation with Unserializable + + +class AddDefaultTests extends Phase with PreservesAll with HasChipyardStageUtils { + override val prerequisites = Seq(Dependency[ChipyardChiselStage]) + override val optionalPrerequisiteOf = Seq(Dependency[GenerateTestSuiteMakefrags]) + + private def addTestSuiteAnnotations(implicit p: Parameters): Seq[Annotation] = { + val annotations = mutable.ArrayBuffer[Annotation]() + val suiteHelper = new TestSuiteHelper + // Use Xlen as a proxy for detecting if we are a processor-like target + // The underlying test suites expect this field to be defined + val tileParams = p(TilesLocated(InSubsystem)) map (tp => tp.tileParams) + if (p.lift(MaxXLen).nonEmpty) + // If a custom test suite is set up, use the custom test suite + annotations += CustomMakefragSnippet(p(TestSuitesKey).apply(tileParams, suiteHelper, p)) + + ChipyardTestSuiteAnnotation(suiteHelper.suites.values.toSeq) +: annotations.toSeq + } + + + override def transform(annotations: AnnotationSeq): AnnotationSeq = { + val (testSuiteAnnos, oAnnos) = annotations.partition { + case ChipyardTestSuiteAnnotation(_) => true + case o => false + } + implicit val p = getConfig(view[ChipyardOptions](annotations).configNames.get).toInstance + addTestSuiteAnnotations(p) ++ oAnnos + } +} diff --git a/tools/stage-chisel7/src/main/scala/phases/Checks.scala b/tools/stage-chisel7/src/main/scala/phases/Checks.scala new file mode 100644 index 0000000000..b7eaae6ece --- /dev/null +++ b/tools/stage-chisel7/src/main/scala/phases/Checks.scala @@ -0,0 +1,47 @@ +// See LICENSE + +package chipyard.stage.phases + +import firrtl.AnnotationSeq +import firrtl.annotations.Annotation +import firrtl.options.{OptionsException, Phase, TargetDirAnnotation} +import chipyard.stage._ + +import scala.collection.mutable + 
+/** Checks for the correct type and number of command line arguments */ +class Checks extends Phase with PreservesAll { + + override def transform(annotations: AnnotationSeq): AnnotationSeq = { + val targetDir, topModule, configNames, outputBaseName = mutable.ListBuffer[Annotation]() + + annotations.foreach { + case a: TargetDirAnnotation => a +=: targetDir + case a: TopModuleAnnotation => a +=: topModule + case a: ConfigsAnnotation => a +=: configNames + case a: OutputBaseNameAnnotation => a +=: outputBaseName + case _ => + } + + def required(annoList: mutable.ListBuffer[Annotation], option: String): Unit = { + if (annoList.size != 1) { + throw new OptionsException(s"Exactly one $option required") + } + } + + def optional(annoList: mutable.ListBuffer[Annotation], option: String): Unit = { + if (annoList.size > 1) { + throw new OptionsException(s"Too many $option options have been specified") + } + } + + required(targetDir, "target directory") + required(topModule, "top module") + required(configNames, "configs string (','-delimited)") + + optional(outputBaseName, "output base name") + + annotations + } + +} diff --git a/tools/stage-chisel7/src/main/scala/phases/GenerateArtefacts.scala b/tools/stage-chisel7/src/main/scala/phases/GenerateArtefacts.scala new file mode 100644 index 0000000000..0982ae36fd --- /dev/null +++ b/tools/stage-chisel7/src/main/scala/phases/GenerateArtefacts.scala @@ -0,0 +1,26 @@ +// See LICENSE + +package chipyard.stage.phases + +import firrtl.AnnotationSeq +import firrtl.options.{Dependency, Phase, StageOptions} +import firrtl.options.Viewer.view +import chipyard.stage._ +import freechips.rocketchip.util.{ElaborationArtefacts} + +/** Writes [[ElaborationArtefacts]] into files */ +class GenerateArtefacts extends Phase with PreservesAll with HasChipyardStageUtils { + + override val prerequisites = Seq(Dependency[chipyard.stage.ChipyardChiselStage]) + + override def transform(annotations: AnnotationSeq): AnnotationSeq = { + val targetDir = view[StageOptions](annotations).targetDir + + ElaborationArtefacts.files.foreach { case (extension, contents) => + writeOutputFile(targetDir, s"${view[ChipyardOptions](annotations).longName.get}.${extension}", contents ()) + } + + annotations + } + +} diff --git a/tools/stage-chisel7/src/main/scala/phases/GenerateFirrtlAnnos.scala b/tools/stage-chisel7/src/main/scala/phases/GenerateFirrtlAnnos.scala new file mode 100644 index 0000000000..ec69aef210 --- /dev/null +++ b/tools/stage-chisel7/src/main/scala/phases/GenerateFirrtlAnnos.scala @@ -0,0 +1,34 @@ +// See LICENSE + +package chipyard.stage.phases + +import firrtl.AnnotationSeq +import firrtl.annotations.{JsonProtocol} +import firrtl.options.Viewer.view +import firrtl.options._ +import chipyard.stage._ + +/** Writes FIRRTL annotations into a file */ +class GenerateFirrtlAnnos extends Phase with PreservesAll with HasChipyardStageUtils { + + override val prerequisites = Seq(Dependency[chipyard.stage.ChipyardChiselStage]) + + override def transform(annotations: AnnotationSeq): AnnotationSeq = { + val targetDir = view[StageOptions](annotations).targetDir + val fileName = s"${view[ChipyardOptions](annotations).longName.get}.anno.json" + + val annos = annotations.view.flatMap { + // Remove TargetDirAnnotation so that we can pass as argument to FIRRTL + // Remove CustomFileEmission, those are serialized automatically by Stages + case (_: Unserializable | _: TargetDirAnnotation | _: CustomFileEmission) => + None + case a => + Some(a) + } + + writeOutputFile(targetDir, fileName, 
JsonProtocol.serialize(annos.toSeq)) + + annotations + } + +} diff --git a/tools/stage-chisel7/src/main/scala/phases/GenerateTestSuiteMakefrags.scala b/tools/stage-chisel7/src/main/scala/phases/GenerateTestSuiteMakefrags.scala new file mode 100644 index 0000000000..18e73121b5 --- /dev/null +++ b/tools/stage-chisel7/src/main/scala/phases/GenerateTestSuiteMakefrags.scala @@ -0,0 +1,49 @@ +// See LICENSE for license details. +// Based on Rocket Chip's stage implementation + +package chipyard.stage.phases + +import scala.collection.mutable + +import firrtl.AnnotationSeq +import firrtl.annotations.{Annotation, NoTargetAnnotation} +import firrtl.options.{Phase, StageOptions, Unserializable, Dependency} +import firrtl.options.Viewer.view +import chipyard.stage._ +import freechips.rocketchip.system.TestGeneration + +trait MakefragSnippet { self: Annotation => + def toMakefrag: String +} + +case class CustomMakefragSnippet(val toMakefrag: String) extends NoTargetAnnotation with MakefragSnippet with Unserializable + +/** Generates a make script to run tests in [[RocketTestSuiteAnnotation]]. */ +class GenerateTestSuiteMakefrags extends Phase with HasChipyardStageUtils { + + // Our annotations tend not to be serializable, but are not marked as such. + override val prerequisites = Seq(Dependency[chipyard.stage.phases.GenerateFirrtlAnnos], + Dependency[chipyard.stage.phases.AddDefaultTests]) + + override def transform(annotations: AnnotationSeq): AnnotationSeq = { + val targetDir = view[StageOptions](annotations).targetDir + val fileName = s"${view[ChipyardOptions](annotations).longName.get}.d" + + val makefragBuilder = new mutable.StringBuilder() + val outputAnnotations = annotations.flatMap { + case ChipyardTestSuiteAnnotation(tests) => + // Unfortunately the gen method of TestGeneration is rocketchip package + // private, so we either have to copy code in or use the stateful form + TestGeneration.addSuites(tests) + None + case a: MakefragSnippet => + makefragBuilder :+ ("\n" + a.toMakefrag) + None + case a => Some(a) + } + writeOutputFile(targetDir, fileName, TestGeneration.generateMakeFrag ++ makefragBuilder.toString) + outputAnnotations + } + + override final def invalidates(a: Phase): Boolean = false +} diff --git a/tools/stage-chisel7/src/main/scala/phases/PreElaboration.scala b/tools/stage-chisel7/src/main/scala/phases/PreElaboration.scala new file mode 100644 index 0000000000..646d54c3a0 --- /dev/null +++ b/tools/stage-chisel7/src/main/scala/phases/PreElaboration.scala @@ -0,0 +1,43 @@ +// See LICENSE + +package chipyard.stage.phases + +import chisel3.RawModule +import chisel3.stage.ChiselGeneratorAnnotation +import firrtl.AnnotationSeq +import firrtl.options.Viewer.view +import firrtl.options.{Dependency, Phase, StageOptions} +import org.chipsalliance.cde.config.{Field, Parameters} +import freechips.rocketchip.diplomacy._ +import chipyard.stage._ + +case object TargetDirKey extends Field[String](".") + +/** Constructs a generator function that returns a top module with given config parameters */ +class PreElaboration extends Phase with PreservesAll with HasChipyardStageUtils { + + override val prerequisites = Seq(Dependency[Checks]) + override val optionalPrerequisiteOf = Seq(Dependency[chisel3.stage.phases.Elaborate]) + + override def transform(annotations: AnnotationSeq): AnnotationSeq = { + + val stageOpts = view[StageOptions](annotations) + val rOpts = view[ChipyardOptions](annotations) + val topMod = rOpts.topModule.get + + val config = getConfig(rOpts.configNames.get).alterPartial { + 
case TargetDirKey => stageOpts.targetDir + } + + val gen = () => + topMod + .getConstructor(classOf[Parameters]) + .newInstance(config) match { + case a: RawModule => a + case a: LazyModule => LazyModule(a).module + } + + ChiselGeneratorAnnotation(gen) +: annotations + } + +} diff --git a/tools/stage-chisel7/src/main/scala/phases/PreservesAll.scala b/tools/stage-chisel7/src/main/scala/phases/PreservesAll.scala new file mode 100644 index 0000000000..30884be4b4 --- /dev/null +++ b/tools/stage-chisel7/src/main/scala/phases/PreservesAll.scala @@ -0,0 +1,8 @@ +package chipyard.stage.phases + +import firrtl.AnnotationSeq +import firrtl.options.{Dependency, DependencyManagerException, Phase, PhaseManager} + +trait PreservesAll { this: Phase => + override def invalidates(phase: Phase) = false +} diff --git a/tools/stage-chisel7/src/main/scala/phases/TransformAnnotations.scala b/tools/stage-chisel7/src/main/scala/phases/TransformAnnotations.scala new file mode 100644 index 0000000000..7164084171 --- /dev/null +++ b/tools/stage-chisel7/src/main/scala/phases/TransformAnnotations.scala @@ -0,0 +1,21 @@ +// See LICENSE + +package chipyard.stage.phases + +import chisel3.stage.ChiselOutputFileAnnotation +import firrtl.AnnotationSeq +import firrtl.options.Viewer.view +import firrtl.options.{Dependency, Phase} +import chipyard.stage._ + +/** Transforms RocketChipAnnotations into those used by other stages */ +class TransformAnnotations extends Phase with PreservesAll with HasChipyardStageUtils { + + override val prerequisites = Seq(Dependency[Checks]) + override val optionalPrerequisiteOf = Seq(Dependency[chisel3.stage.phases.AddImplicitOutputFile]) + + override def transform(annotations: AnnotationSeq): AnnotationSeq = { + /** Construct output file annotation for emission */ + new ChiselOutputFileAnnotation(view[ChipyardOptions](annotations).longName.get) +: annotations + } +} diff --git a/variables.mk b/variables.mk index e8c992c4ef..a1de529488 100644 --- a/variables.mk +++ b/variables.mk @@ -28,6 +28,7 @@ HELP_COMPILATION_VARIABLES = \ " See https://www.scala-sbt.org/1.x/docs/Command-Line-Reference.html\#Command+Line+Options" \ " SBT = if overridden, used to invoke sbt (default is to invoke sbt by sbt-launch.jar)" \ " FIRTOOL_BIN = path to CIRCT firtool (default: 'firtool' in PATH)" \ +" USE_CHISEL7 = EXPERIMENTAL: set to '1' to build with Chisel 7" \ HELP_PROJECT_VARIABLES = \ " SUB_PROJECT = use the specific subproject default variables [$(SUB_PROJECT)]" \