Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 8 additions & 0 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -69,3 +69,11 @@ repos:
# black-compatible flake-8 config
args: ['--max-line-length=100', # black default
'--extend-ignore=E203'] # E203 is not PEP8 compliant

- repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.19.1
hooks:
- id: mypy
args: [--config-file=mypy.ini]
files: ^src/finn/util/.*\.py$ # Only check util. directory for now
exclude: qnn-data # python doesn't allow hyphens in package names
12 changes: 12 additions & 0 deletions mypy.ini
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
[mypy]
python_version = 3.12
strict = True
warn_return_any = True
warn_unused_configs = True
disallow_untyped_defs = False
ignore_missing_imports = True

[mypy-finn.util.*]
disallow_untyped_defs = True
disallow_incomplete_defs = True
check_untyped_defs = True
1 change: 1 addition & 0 deletions requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ dataclasses-json==0.5.7
gspread==3.6.0
importlib-resources==6.1.0
ipython==8.12.2
mypy>=1.19.1
numpy==1.24.1
onnx==1.17.0
onnxoptimizer
Expand Down
77 changes: 44 additions & 33 deletions src/finn/util/basic.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,12 +31,13 @@
import sys
import tempfile
from qonnx.util.basic import roundup_to_integer_multiple
from typing import Dict, List, Optional, Tuple

# test boards used for bnn pynq tests
test_board_map = ["Pynq-Z1", "KV260_SOM", "ZCU104", "U250"]
test_board_map: List[str] = ["Pynq-Z1", "KV260_SOM", "ZCU104", "U250"]

# mapping from PYNQ board names to FPGA part names
pynq_part_map = dict()
pynq_part_map: Dict[str, str] = dict()
pynq_part_map["Ultra96"] = "xczu3eg-sbva484-1-e"
pynq_part_map["Ultra96-V2"] = "xczu3eg-sbva484-1-i"
pynq_part_map["Pynq-Z1"] = "xc7z020clg400-1"
Expand All @@ -51,7 +52,7 @@


# native AXI HP port width (in bits) for PYNQ boards
pynq_native_port_width = dict()
pynq_native_port_width: Dict[str, int] = dict()
pynq_native_port_width["Pynq-Z1"] = 64
pynq_native_port_width["Pynq-Z2"] = 64
pynq_native_port_width["Ultra96"] = 128
Expand All @@ -65,28 +66,28 @@
pynq_native_port_width["AUP-ZU3_8GB"] = 128

# Alveo device and platform mappings
alveo_part_map = dict()
alveo_part_map: Dict[str, str] = dict()
alveo_part_map["U50"] = "xcu50-fsvh2104-2L-e"
alveo_part_map["U200"] = "xcu200-fsgd2104-2-e"
alveo_part_map["U250"] = "xcu250-figd2104-2L-e"
alveo_part_map["U280"] = "xcu280-fsvh2892-2L-e"
alveo_part_map["U55C"] = "xcu55c-fsvh2892-2L-e"

alveo_default_platform = dict()
alveo_default_platform: Dict[str, str] = dict()
alveo_default_platform["U50"] = "xilinx_u50_gen3x16_xdma_5_202210_1"
alveo_default_platform["U200"] = "xilinx_u200_gen3x16_xdma_2_202110_1"
alveo_default_platform["U250"] = "xilinx_u250_gen3x16_xdma_2_1_202010_1"
alveo_default_platform["U280"] = "xilinx_u280_gen3x16_xdma_1_202211_1"
alveo_default_platform["U55C"] = "xilinx_u55c_gen3x16_xdma_3_202210_1"

# Create a joint part map, encompassing other boards too
part_map = {**pynq_part_map, **alveo_part_map}
part_map: Dict[str, str] = {**pynq_part_map, **alveo_part_map}
part_map["VEK280"] = "xcve2802-vsvh1760-2MP-e-S"
part_map["VCK190"] = "xcvc1902-vsva2197-2MP-e-S"
part_map["V80"] = "xcv80-lsva4737-2MHP-e-s"


def get_rtlsim_trace_depth():
def get_rtlsim_trace_depth() -> int:
"""Return the trace depth for rtlsim. Controllable
via the RTLSIM_TRACE_DEPTH environment variable. If the env.var. is
undefined, the default value of 1 is returned. A trace depth of 1
Expand All @@ -105,7 +106,7 @@ def get_rtlsim_trace_depth():
return 1


def get_finn_root():
def get_finn_root() -> str:
"Return the root directory that FINN is cloned into."

try:
Expand All @@ -118,7 +119,7 @@ def get_finn_root():
)


def get_vivado_root():
def get_vivado_root() -> str:
"Return the root directory that Vivado is installed into."

try:
Expand All @@ -131,14 +132,14 @@ def get_vivado_root():
)


def get_liveness_threshold_cycles():
def get_liveness_threshold_cycles() -> int:
    """Return the number of no-output cycles rtlsim will wait before assuming
    the simulation is not finishing and throwing an exception.

    Controlled by the LIVENESS_THRESHOLD environment variable; defaults to
    1000000 when the variable is unset."""
    threshold = os.getenv("LIVENESS_THRESHOLD", 1000000)
    return int(threshold)


def make_build_dir(prefix=""):
def make_build_dir(prefix: str = "") -> str:
"""Creates a folder with given prefix to be used as a build dir.
Use this function instead of tempfile.mkdtemp to ensure any generated files
will survive on the host after the FINN Docker container exits."""
Expand All @@ -159,27 +160,27 @@ class CppBuilder:
"""Builds the g++ compiler command to produces the executable of the c++ code
in code_gen_dir which is passed to the function build() of this class."""

def __init__(self):
self.include_paths = []
self.cpp_files = []
self.executable_path = ""
self.code_gen_dir = ""
self.compile_components = []
self.compile_script = ""
def __init__(self) -> None:
    """Start with an empty build configuration: no includes, no sources,
    and no generated build artifacts yet."""
    # Inputs to the compile command.
    self.include_paths: List[str] = []
    self.cpp_files: List[str] = []
    self.compile_components: List[str] = []
    # Outputs/locations filled in while preparing the build.
    self.executable_path: str = ""
    self.code_gen_dir: str = ""
    self.compile_script: str = ""

def append_includes(self, library_path):
def append_includes(self, library_path: str) -> None:
    """Record an additional include/library path for the g++ invocation."""
    self.include_paths += [library_path]

def append_sources(self, cpp_file):
def append_sources(self, cpp_file: str) -> None:
    """Register one more C++ source file to be compiled."""
    self.cpp_files.extend([cpp_file])

def set_executable_path(self, path):
def set_executable_path(self, path: str) -> None:
    """Record where the compiled executable should be placed."""
    self.executable_path = path

def build(self, code_gen_dir):
def build(self, code_gen_dir: str) -> None:
"""Builds the g++ compiler command according to entries in include_paths
and cpp_files lists. Saves it in bash script in given folder and
executes it."""
Expand All @@ -202,7 +203,9 @@ def build(self, code_gen_dir):
process_compile.communicate()


def launch_process_helper(args, proc_env=None, cwd=None):
def launch_process_helper(
args: List[str], proc_env: Optional[Dict[str, str]] = None, cwd: Optional[str] = None
) -> Tuple[str, str]:
"""Helper function to launch a process in a way that facilitates logging
stdout/stderr with Python loggers.
Returns (cmd_out, cmd_err)."""
Expand All @@ -211,22 +214,26 @@ def launch_process_helper(args, proc_env=None, cwd=None):
with subprocess.Popen(
args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=proc_env, cwd=cwd
) as proc:
(cmd_out, cmd_err) = proc.communicate()
(cmd_out_bytes, cmd_err_bytes) = proc.communicate()

cmd_out = ""
cmd_err = ""

if cmd_out is not None:
cmd_out = cmd_out.decode("utf-8")
cmd_out = cmd_out_bytes.decode("utf-8")
sys.stdout.write(cmd_out)
if cmd_err is not None:
cmd_err = cmd_err.decode("utf-8")
cmd_err = cmd_err_bytes.decode("utf-8")
sys.stderr.write(cmd_err)
return (cmd_out, cmd_err)


def which(program):
def which(program: str) -> Optional[str]:
"Python equivalent of the shell cmd 'which'."

# source:
# https://stackoverflow.com/questions/377017/test-if-executable-exists-in-python
def is_exe(fpath):
def is_exe(fpath: str) -> bool:
return os.path.isfile(fpath) and os.access(fpath, os.X_OK)

fpath, fname = os.path.split(program)
Expand Down Expand Up @@ -255,8 +262,10 @@ def is_exe(fpath):


def get_memutil_alternatives(
req_mem_spec, mem_primitives=mem_primitives_versal, sort_min_waste=True
):
req_mem_spec: Tuple[int, int],
mem_primitives: Dict[str, Tuple[int, int]] = mem_primitives_versal,
sort_min_waste: bool = True,
) -> List[Tuple[str, Tuple[int, float, int]]]:
"""Computes how many instances of a memory primitive are necessary to
implement a desired memory size, where req_mem_spec is the desired
size and the primitive_spec is the primitve size. The sizes are expressed
Expand All @@ -275,7 +284,9 @@ def get_memutil_alternatives(
return ret


def memutil(req_mem_spec, primitive_spec):
def memutil(
req_mem_spec: Tuple[int, int], primitive_spec: Tuple[int, int]
) -> Tuple[int, float, int]:
"""Computes how many instances of a memory primitive are necessary to
implemented a desired memory size, where req_mem_spec is the desired
size and the primitive_spec is the primitve size. The sizes are expressed
Expand All @@ -296,15 +307,15 @@ def memutil(req_mem_spec, primitive_spec):
return (count, eff, waste)


def is_versal(fpgapart):
def is_versal(fpgapart: str) -> bool:
    """Returns whether board is part of the Versal family"""
    # Versal parts are identified by a small set of known part-number prefixes.
    four_char_prefixes = ("xcvc", "xcve", "xcvp", "xcvm", "xqvc", "xqvm")
    five_char_prefixes = ("xqrvc", "xcv80")
    return fpgapart.startswith(four_char_prefixes) or fpgapart.startswith(five_char_prefixes)


def get_dsp_block(fpgapart):
def get_dsp_block(fpgapart: str) -> str:
if is_versal(fpgapart):
return "DSP58"
elif fpgapart[2] == "7":
Expand Down
23 changes: 15 additions & 8 deletions src/finn/util/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,11 +16,14 @@
import json
import onnx
from qonnx.custom_op.registry import getCustomOp, is_custom_op
from typing import Any, Dict, List, Optional


# update this code to handle export configs from subgraphs
# where the subgraph is found in a node's attribute as a graph type
def extract_model_config(model, subgraph_hier, attr_names_to_extract):
def extract_model_config(
model: Any, subgraph_hier: Optional[str], attr_names_to_extract: List[str]
) -> Dict[str, Dict[str, Any]]:
"""Create a dictionary with layer name -> attribute mappings extracted from the
model. The created dictionary can be later applied on a model with
qonnx.transform.general.ApplyConfig.
Expand All @@ -29,7 +32,7 @@ def extract_model_config(model, subgraph_hier, attr_names_to_extract):
For example, a node 'Conv_0' inside a subgraph of node 'IfNode_0' will be exported
as 'IfNode_0_Conv_0' in the config."""

cfg = dict()
cfg: Dict[str, Dict[str, Any]] = dict()
cfg["Defaults"] = dict()
for n in model.graph.node:
new_hier = n.name if subgraph_hier is None else str(subgraph_hier) + "_" + n.name
Expand All @@ -38,7 +41,7 @@ def extract_model_config(model, subgraph_hier, attr_names_to_extract):
is_custom = is_custom_op(n.domain, n.op_type)
if is_custom:
oi = getCustomOp(n)
layer_dict = dict()
layer_dict: Dict[str, Any] = dict()
for attr in attr_names_to_extract:
try:
layer_dict[attr] = oi.get_nodeattr(attr)
Expand Down Expand Up @@ -66,7 +69,9 @@ def extract_model_config(model, subgraph_hier, attr_names_to_extract):
return cfg


def extract_model_config_to_json(model, json_filename, attr_names_to_extract):
def extract_model_config_to_json(
model: Any, json_filename: str, attr_names_to_extract: List[str]
) -> None:
"""Create a json file with layer name -> attribute mappings extracted from the
model. The created json file can be later applied on a model with
qonnx.transform.general.ApplyConfig."""
Expand All @@ -81,15 +86,17 @@ def extract_model_config_to_json(model, json_filename, attr_names_to_extract):
)


def extract_model_config_consolidate_shuffles(model, output_file, hw_attrs):
def extract_model_config_consolidate_shuffles(
model: Any, output_file: str, hw_attrs: List[str]
) -> None:
"""Export flow that takes into consideration how Shuffle operations have been decomposed"""
extract_model_config_to_json(model, output_file, hw_attrs)

with open(output_file, "r") as f:
config = json.load(f)
config: Dict[str, Any] = json.load(f)

shuffle_configs = {}
nodes_to_remove = []
shuffle_configs: Dict[str, Dict[str, Any]] = {}
nodes_to_remove: List[str] = []

for node in model.graph.node:
if node.op_type in ["InnerShuffle_rtl", "OuterShuffle_hls"]:
Expand Down
15 changes: 8 additions & 7 deletions src/finn/util/create.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,12 +35,13 @@
gen_finn_dt_tensor,
qonnx_make_model,
)
from typing import Any, Dict, List, Optional


def hls_random_mlp_maker(layer_spec):
def hls_random_mlp_maker(layer_spec: List[Dict[str, Any]]) -> ModelWrapper:
"""Create an MLP of given specification using HLSCustomOp instances.
Generate random weights/thresholds of appropriate size."""
ret = []
ret: List[Dict[str, Any]] = []
for lyr in layer_spec:
idt = lyr["idt"]
wdt = lyr["wdt"]
Expand All @@ -50,7 +51,7 @@ def hls_random_mlp_maker(layer_spec):
lyr["W"] = gen_finn_dt_tensor(wdt, (mw, mh))
if act is None:
# no activation, produce accumulators
T = None
T: Optional[np.ndarray[Any, Any]] = None
tdt = None
if wdt == DataType["BIPOLAR"] and idt == DataType["BIPOLAR"]:
odt = DataType["UINT32"]
Expand Down Expand Up @@ -79,12 +80,12 @@ def hls_random_mlp_maker(layer_spec):
return hls_mlp_maker(ret)


def hls_mlp_maker(layer_spec):
def hls_mlp_maker(layer_spec: List[Dict[str, Any]]) -> ModelWrapper:
"""Create an MLP of given specification using HLSCustomOp instances."""

current_in_name = ""
current_out_name = ""
i = 0
current_in_name: str = ""
current_out_name: str = ""
i: int = 0

graph = helper.make_graph(nodes=[], name="mlp", inputs=[], outputs=[])

Expand Down
Loading