Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 11 additions & 0 deletions .github/workflows/regress.yml
Original file line number Diff line number Diff line change
Expand Up @@ -190,6 +190,17 @@ jobs:
uses: ./.github/actions/singularity-setup
- name: Generate c_header code
run: ./do gen:c_header
regress-gen-sverilog:
runs-on: ubuntu-latest
env:
SINGULARITY: 1
steps:
- name: Clone Github Repo Action
uses: actions/checkout@v4
- name: singularity setup
uses: ./.github/actions/singularity-setup
- name: Generate sverilog_header code
run: ./do gen:sverilog
regress-cpp-unit:
runs-on: ubuntu-latest
env:
Expand Down
121 changes: 1 addition & 120 deletions backends/generators/c_header/generate_encoding.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,6 @@
import logging
import argparse
import yaml
import json

# Add parent directory to path to import generator.py
parent_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
Expand All @@ -18,6 +17,7 @@
from generator import (
load_instructions,
load_csrs,
load_exception_codes,
parse_match,
parse_extension_requirements,
)
Expand All @@ -30,125 +30,6 @@ def calculate_mask(match_str):
return int("".join("0" if c == "-" else "1" for c in match_str), 2)


def load_exception_codes(
    ext_dir, enabled_extensions=None, include_all=False, resolved_codes_file=None
):
    """Collect (num, name) exception-code pairs for the enabled extensions.

    Codes come either from a pre-resolved JSON file (``resolved_codes_file``,
    preferred when present and readable) or from scanning extension YAML
    definitions under ``ext_dir``.

    Args:
        ext_dir: Root directory that is recursively searched for extension
            YAML files (``kind: extension``).
        enabled_extensions: Extension names used to evaluate ``definedBy`` /
            ``excludedBy`` requirements. Defaults to an empty list.
        include_all: When True, skip extension filtering and keep every code.
        resolved_codes_file: Optional path to a JSON list of objects with
            ``num`` and ``name`` keys; takes precedence over the YAML scan.

    Returns:
        List of ``(num, sanitized_name)`` tuples sorted by ``num``, with
        duplicate numbers dropped (first occurrence wins).
    """

    def sanitize(name):
        # Normalize to an identifier-friendly form: lowercase, separators -> "_".
        return name.lower().replace(" ", "_").replace("/", "_").replace("-", "_")

    def sort_and_dedupe(codes):
        # Sort by code number; the stable sort keeps the first-seen name per number.
        seen_nums = set()
        unique_codes = []
        for num, name in sorted(codes, key=lambda x: x[0]):
            if num not in seen_nums:
                seen_nums.add(num)
                unique_codes.append((num, name))
        return unique_codes

    exception_codes = []
    found_extensions = 0
    found_files = 0

    if enabled_extensions is None:
        enabled_extensions = []

    # Fast path: use the pre-resolved JSON file instead of processing YAML files.
    if resolved_codes_file and os.path.exists(resolved_codes_file):
        try:
            with open(resolved_codes_file, encoding="utf-8") as f:
                resolved_codes = json.load(f)

            for code in resolved_codes:
                num = code.get("num")
                name = code.get("name")
                if num is not None and name is not None:
                    exception_codes.append((num, sanitize(name)))

            logging.info(
                f"Loaded {len(exception_codes)} pre-resolved exception codes from {resolved_codes_file}"
            )
            return sort_and_dedupe(exception_codes)

        except Exception as e:
            logging.error(
                f"Error loading resolved codes file {resolved_codes_file}: {e}"
            )
            # Fall back to processing YAML files

    for dirpath, _, filenames in os.walk(ext_dir):
        for fname in filenames:
            if not fname.endswith(".yaml"):
                continue

            found_files += 1
            path = os.path.join(dirpath, fname)

            try:
                with open(path, encoding="utf-8") as f:
                    data = yaml.safe_load(f)

                # Only extension definitions can carry exception codes.
                if not isinstance(data, dict) or data.get("kind") != "extension":
                    continue

                found_extensions += 1

                # Skip extension filtering if include_all is True.
                if not include_all:
                    # Keep only codes whose extension requirements are met...
                    definedBy = data.get("definedBy")
                    if definedBy:
                        meets_req = parse_extension_requirements(definedBy)
                        if not meets_req(enabled_extensions):
                            continue

                    # ...and that are not explicitly excluded.
                    excludedBy = data.get("excludedBy")
                    if excludedBy:
                        exclusion_check = parse_extension_requirements(excludedBy)
                        if exclusion_check(enabled_extensions):
                            continue

                for code in data.get("exception_codes", []):
                    num = code.get("num")
                    name = code.get("name")
                    if num is not None and name is not None:
                        exception_codes.append((num, sanitize(name)))

            except Exception as e:
                logging.error(f"Error processing file {path}: {e}")

    if found_extensions > 0:
        logging.info(
            f"Found {found_extensions} extension definitions in {found_files} files"
        )
        logging.info(f"Added {len(exception_codes)} exception codes to the output")
    else:
        logging.warning(f"No extension definitions found in {ext_dir}")

    return sort_and_dedupe(exception_codes)


def extract_instruction_fields(instructions):
"""Extract field names and their positions from instruction definitions."""
field_dict = {}
Expand Down
132 changes: 124 additions & 8 deletions backends/generators/generator.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
import yaml
import logging
import pprint
import json

pp = pprint.PrettyPrinter(indent=2)
logging.basicConfig(level=logging.INFO, format="%(levelname)s:: %(message)s")
Expand Down Expand Up @@ -326,14 +327,15 @@ def load_instructions(

# Process RV64 encoding
rv64_match = rv64_encoding.get("match")
rv32_match = rv32_encoding.get("match")

if rv64_match:
instr_dict[name] = {
"match": rv64_match
} # RV64 gets the default name

# Process RV32 encoding with a _rv32 suffix
rv32_match = rv32_encoding.get("match")
if rv32_match:
if rv32_match and rv32_match != rv64_match:
# Process RV32 encoding with a _rv32 suffix
instr_dict[f"{name}_rv32"] = {"match": rv32_match}

continue # Skip the rest of the loop as we've already added the encodings
Expand Down Expand Up @@ -491,11 +493,7 @@ def load_csrs(csr_root, enabled_extensions, include_all=False, target_arch="RV64
else:
addr_int = int(addr_to_use, 0)

# For BOTH architecture, add suffix to RV32-specific CSRs
if target_arch == "BOTH" and base == 32:
csrs[addr_int] = f"{name.upper()}.RV32"
else:
csrs[addr_int] = name.upper()
csrs[addr_int] = name.upper()
except Exception as e:
logging.error(f"Error parsing address {addr_to_use} in {path}: {e}")
address_errors += 1
Expand All @@ -518,6 +516,124 @@ def load_csrs(csr_root, enabled_extensions, include_all=False, target_arch="RV64
return csrs


def load_exception_codes(
    ext_dir, enabled_extensions=None, include_all=False, resolved_codes_file=None
):
    """Collect (num, name) exception-code pairs for the enabled extensions.

    Codes come either from a pre-resolved JSON file (``resolved_codes_file``,
    preferred when present and readable) or from scanning extension YAML
    definitions under ``ext_dir``.

    Args:
        ext_dir: Root directory that is recursively searched for extension
            YAML files (``kind: extension``).
        enabled_extensions: Extension names used to evaluate ``definedBy`` /
            ``excludedBy`` requirements. Defaults to an empty list.
        include_all: When True, skip extension filtering and keep every code.
        resolved_codes_file: Optional path to a JSON list of objects with
            ``num`` and ``name`` keys; takes precedence over the YAML scan.

    Returns:
        List of ``(num, sanitized_name)`` tuples sorted by ``num``, with
        duplicate numbers dropped (first occurrence wins).
    """

    def sanitize(name):
        # Normalize to an identifier-friendly form: lowercase, separators -> "_".
        return name.lower().replace(" ", "_").replace("/", "_").replace("-", "_")

    def sort_and_dedupe(codes):
        # Sort by code number; the stable sort keeps the first-seen name per number.
        seen_nums = set()
        unique_codes = []
        for num, name in sorted(codes, key=lambda x: x[0]):
            if num not in seen_nums:
                seen_nums.add(num)
                unique_codes.append((num, name))
        return unique_codes

    exception_codes = []
    found_extensions = 0
    found_files = 0

    if enabled_extensions is None:
        enabled_extensions = []

    # Fast path: use the pre-resolved JSON file instead of processing YAML files.
    if resolved_codes_file and os.path.exists(resolved_codes_file):
        try:
            with open(resolved_codes_file, encoding="utf-8") as f:
                resolved_codes = json.load(f)

            for code in resolved_codes:
                num = code.get("num")
                name = code.get("name")
                if num is not None and name is not None:
                    exception_codes.append((num, sanitize(name)))

            logging.info(
                f"Loaded {len(exception_codes)} pre-resolved exception codes from {resolved_codes_file}"
            )
            return sort_and_dedupe(exception_codes)

        except Exception as e:
            logging.error(
                f"Error loading resolved codes file {resolved_codes_file}: {e}"
            )
            # Fall back to processing YAML files

    for dirpath, _, filenames in os.walk(ext_dir):
        for fname in filenames:
            if not fname.endswith(".yaml"):
                continue

            found_files += 1
            path = os.path.join(dirpath, fname)

            try:
                with open(path, encoding="utf-8") as f:
                    data = yaml.safe_load(f)

                # Only extension definitions can carry exception codes.
                if not isinstance(data, dict) or data.get("kind") != "extension":
                    continue

                found_extensions += 1

                # Skip extension filtering if include_all is True.
                if not include_all:
                    # Keep only codes whose extension requirements are met...
                    definedBy = data.get("definedBy")
                    if definedBy:
                        meets_req = parse_extension_requirements(definedBy)
                        if not meets_req(enabled_extensions):
                            continue

                    # ...and that are not explicitly excluded.
                    excludedBy = data.get("excludedBy")
                    if excludedBy:
                        exclusion_check = parse_extension_requirements(excludedBy)
                        if exclusion_check(enabled_extensions):
                            continue

                for code in data.get("exception_codes", []):
                    num = code.get("num")
                    name = code.get("name")
                    if num is not None and name is not None:
                        exception_codes.append((num, sanitize(name)))

            except Exception as e:
                logging.error(f"Error processing file {path}: {e}")

    if found_extensions > 0:
        logging.info(
            f"Found {found_extensions} extension definitions in {found_files} files"
        )
        logging.info(f"Added {len(exception_codes)} exception codes to the output")
    else:
        logging.warning(f"No extension definitions found in {ext_dir}")

    return sort_and_dedupe(exception_codes)


def parse_match(match_str):
"""
Convert the bit pattern string to an integer.
Expand Down
Loading
Loading