Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
29 commits
Select commit Hold shift + click to select a range
cba888a
feat: Add Python logging integration for subprocess execution
tafk7 Nov 6, 2025
8093e95
feat: Add hierarchical logging control and subprocess logger names
tafk7 Nov 6, 2025
71e22a8
disable daemon for threading
tafk7 Nov 6, 2025
960b5ee
refactor: Replace stdout/stderr redirection with logging
tafk7 Nov 6, 2025
67d956f
refactor: Convert shell script execution to direct subprocess calls
tafk7 Nov 6, 2025
27c223c
refactor: Remove use_logging parameter from launch_process_helper
tafk7 Nov 6, 2025
3dac7c1
feat: Add show_progress flag for independent builder console output c…
tafk7 Nov 6, 2025
5a8bd6d
feat: Add subprocess_log_levels config for fine-grained logging control
tafk7 Nov 6, 2025
e8a75fc
feat: Separate console and file logging controls for subprocess tools
tafk7 Nov 6, 2025
aad66d5
fix: Set subprocess logger level based on active handlers only
tafk7 Nov 7, 2025
7376e3e
Merge branch 'custom/transformer' into feature/logging-integration-tr…
tafk7 Nov 7, 2025
231cb84
Set default subprocess console level to error
tafk7 Nov 7, 2025
e6e97f6
refactor: Improve subprocess logging and debugging infrastructure
tafk7 Nov 7, 2025
385029d
Linting pass
tafk7 Nov 7, 2025
5b9dce4
Merge branch 'custom/transformer' into feature/logging-integration-tr…
tafk7 Nov 7, 2025
2336223
Leverage backend check functions in specialize layers
tafk7 Nov 9, 2025
12cd51f
Lower minor info messages to debug
tafk7 Nov 9, 2025
03d81c5
Automate brainsmith integration point
tafk7 Nov 9, 2025
8b0adcb
Various fixes for FINNLoop-KernelOp compatibility
tafk7 Nov 10, 2025
eb9bf2f
Remove redundant modelwrapping
tafk7 Nov 14, 2025
645cf54
Remove default env var parsing
tafk7 Nov 14, 2025
7886c54
fix for now?
Nov 15, 2025
42da713
convert to use new is_fpgadataflow_node op
Nov 15, 2025
317d859
revert my other changes
Nov 15, 2025
4d8e833
whitespace fix
Nov 15, 2025
63fb86b
Merge pull request #6 from jsmonson/feature/logging-integration-trans…
tafk7 Nov 15, 2025
28f5add
Add kernelop support to RoundAndClipThresholds
tafk7 Nov 24, 2025
8f57f5d
fix: correct FINNLoop code generation order and FIFO handling
tafk7 Nov 24, 2025
d499f69
Add ElementwiseBinaryOp_hls to mlo ops
tafk7 Nov 25, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
67 changes: 0 additions & 67 deletions src/finn/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,73 +14,6 @@
on FPGAs and other accelerators.
"""

import importlib.util
import os
import warnings
from pathlib import Path


def _setup_environment():
    """Configure FINN environment variables on import.

    Performs three setup steps, in order:

    1. Ensure ``FINN_ROOT`` is set, auto-detecting it from the installed
       ``finn`` package location when absent.
    2. Default ``FINN_DEPS_DIR`` to ``{FINN_ROOT}/deps`` when unset.
    3. Extend ``LD_LIBRARY_PATH`` with Xilinx Vivado/Vitis library
       directories when the corresponding tool env vars are set.

    Mutates ``os.environ`` as a side effect and emits ``warnings.warn``
    (never raises) when detection fails or directories are missing.
    """

    # 1. Determine FINN_ROOT (auto-detect if not set)
    finn_root = os.environ.get("FINN_ROOT")
    if not finn_root:
        try:
            finn_spec = importlib.util.find_spec("finn")
            if finn_spec and finn_spec.origin:
                # origin points at .../src/finn/__init__.py; three .parent
                # hops yield the repository root (above src/).
                finn_init_path = Path(finn_spec.origin).resolve()
                finn_root = str(finn_init_path.parent.parent.parent)
                os.environ["FINN_ROOT"] = finn_root
            else:
                raise RuntimeError("Could not find finn module spec")
        except Exception as e:
            # Best-effort setup: warn and bail out rather than failing import.
            warnings.warn(
                f"FINN_ROOT environment variable is not set and could not be inferred: {e}\n"
                "This may cause issues with certain FINN operations. "
                "Please set FINN_ROOT to the root directory of your FINN installation."
            )
            # Without FINN_ROOT the remaining steps cannot proceed.
            return

    # 2. Set FINN_DEPS_DIR (default to {FINN_ROOT}/deps if not set)
    if not os.environ.get("FINN_DEPS_DIR"):
        default_deps_dir = Path(finn_root) / "deps"
        os.environ["FINN_DEPS_DIR"] = str(default_deps_dir)
        if not default_deps_dir.exists():
            warnings.warn(
                f"FINN_DEPS_DIR set to {default_deps_dir}, but directory does not exist yet. "
                "Dependencies will need to be fetched before some operations can work. "
                "Run ./fetch-repos.sh or use the Docker container for full functionality."
            )

    # 3. Configure LD_LIBRARY_PATH for Xilinx tools
    ld_library_path = os.environ.get("LD_LIBRARY_PATH", "")
    paths_to_add = []

    # Vivado libraries
    # NOTE(review): source indentation was lost in transit; the system lib
    # check is grouped under the XILINX_VIVADO guard per the original
    # comment grouping — confirm against upstream.
    if vivado_path := os.environ.get("XILINX_VIVADO"):
        if (vivado_lib := Path(vivado_path) / "lib" / "lnx64.o").exists():
            paths_to_add.append(str(vivado_lib))
        if (system_lib := Path("/lib/x86_64-linux-gnu")).exists():
            paths_to_add.append(str(system_lib))

    # Vitis FPO libraries
    if vitis_path := os.environ.get("VITIS_PATH"):
        if (vitis_fpo := Path(vitis_path) / "lnx64" / "tools" / "fpo_v7_1").exists():
            paths_to_add.append(str(vitis_fpo))

    # Update LD_LIBRARY_PATH, appending only entries not already present
    # so repeated imports do not grow the variable.
    if paths_to_add:
        existing_paths = ld_library_path.split(":") if ld_library_path else []
        for path in paths_to_add:
            if path not in existing_paths:
                existing_paths.append(path)
        os.environ["LD_LIBRARY_PATH"] = ":".join(existing_paths)


# Configure environment on import
_setup_environment()

# Version information
try:
from ._version import version as __version__
Expand Down
165 changes: 116 additions & 49 deletions src/finn/builder/build_dataflow.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,25 +43,6 @@
from finn.builder.build_dataflow_steps import build_dataflow_step_lookup


# adapted from https://stackoverflow.com/a/39215961
class StreamToLogger(object):
    """File-like shim that forwards each written line to a logger.

    Allows ``sys.stdout``/``sys.stderr`` to be pointed at a logger:
    every non-empty line handed to :meth:`write` becomes one log record
    emitted at the configured level.
    """

    def __init__(self, logger, level):
        # Destination logger and the level at which lines are emitted.
        self.logger = logger
        self.level = level
        # Retained for file-object interface parity; write() emits lines
        # immediately and never accumulates text here.
        self.linebuf = ""

    def write(self, buf):
        # Strip trailing whitespace first so writes consisting only of
        # newlines/spaces produce no log records at all.
        for current_line in buf.rstrip().splitlines():
            self.logger.log(self.level, current_line.rstrip())

    def flush(self):
        # Nothing is buffered, so flushing is a no-op (present only to
        # satisfy the stream protocol).
        pass


def resolve_build_steps(cfg: DataflowBuildConfig, partial: bool = True):
steps = cfg.steps
if steps is None:
Expand Down Expand Up @@ -108,84 +89,170 @@ def build_dataflow_cfg(model_filename, cfg: DataflowBuildConfig):
:param model_filename: ONNX model filename to build
:param cfg: Build configuration
"""
# Set up builder logger for user-facing status messages
builder_log = logging.getLogger("finn.builder")

# if start_step is specified, override the input model
if cfg.start_step is None:
print("Building dataflow accelerator from " + model_filename)
builder_log.debug("Building dataflow accelerator from " + model_filename)
model = ModelWrapper(model_filename)
else:
intermediate_model_filename = resolve_step_filename(cfg.start_step, cfg, -1)
print(
"Building dataflow accelerator from intermediate checkpoint"
builder_log.debug(
"Building dataflow accelerator from intermediate checkpoint "
+ intermediate_model_filename
)
model = ModelWrapper(intermediate_model_filename)
assert type(model) is ModelWrapper
finn_build_dir = os.environ["FINN_BUILD_DIR"]

print("Intermediate outputs will be generated in " + finn_build_dir)
print("Final outputs will be generated in " + cfg.output_dir)
print("Build log is at " + cfg.output_dir + "/build_dataflow.log")
builder_log.debug("Intermediate outputs will be generated in " + finn_build_dir)
builder_log.debug("Final outputs will be generated in " + cfg.output_dir)
builder_log.debug("Build log is at " + cfg.output_dir + "/build_dataflow.log")
# create the output dir if it doesn't exist
if not os.path.exists(cfg.output_dir):
os.makedirs(cfg.output_dir)
step_num = 1
time_per_step = dict()
build_dataflow_steps = resolve_build_steps(cfg)
# set up logger

# Set up root logger with file handler for audit trail
logging.basicConfig(
level=logging.DEBUG,
format="[%(asctime)s] %(message)s",
format="[%(asctime)s] [%(name)s] %(levelname)s: %(message)s",
filename=cfg.output_dir + "/build_dataflow.log",
filemode="a",
)
log = logging.getLogger("build_dataflow")
stdout_logger = StreamToLogger(log, logging.INFO)
stderr_logger = StreamToLogger(log, logging.ERROR)
stdout_orig = sys.stdout
stderr_orig = sys.stderr

# Configure finn.builder logger (progress messages) - controlled by show_progress
builder_logger = logging.getLogger("finn.builder")
builder_logger.setLevel(logging.INFO)
if cfg.show_progress:
# Show progress messages on console with clean formatting
builder_console = logging.StreamHandler(sys.stdout)
builder_console.setFormatter(logging.Formatter("%(message)s"))
builder_logger.addHandler(builder_console)
# Add file handler for audit trail (match root logger format for consistency)
builder_file = logging.FileHandler(cfg.output_dir + "/build_dataflow.log", mode="a")
builder_file.setFormatter(
logging.Formatter("[%(asctime)s] [%(name)s] %(levelname)s: %(message)s")
)
builder_logger.addHandler(builder_file)
# Don't propagate to finn parent (we handle both console and file locally)
builder_logger.propagate = False

# Configure finn tool loggers (subprocess output) - controlled by verbose
finn_logger = logging.getLogger("finn")
finn_logger.setLevel(logging.DEBUG) # Permissive parent (children can filter)

# Add console handler if verbose mode
if cfg.verbose:
finn_console_handler = logging.StreamHandler(sys.stdout)
console_formatter = logging.Formatter("[%(name)s] %(levelname)s: %(message)s")
finn_console_handler.setFormatter(console_formatter)
finn_console_handler.setLevel(logging.ERROR)
finn_logger.addHandler(finn_console_handler)

# Always propagate to file (via root logger)
finn_logger.propagate = True

# Apply subprocess log level overrides (console and file independently)
# Collect all categories from both configs
all_categories = set()
if cfg.subprocess_console_levels:
all_categories.update(cfg.subprocess_console_levels.keys())
if cfg.subprocess_log_levels:
all_categories.update(cfg.subprocess_log_levels.keys())

configured_logger_names = []
for category in all_categories:
logger_name = f"finn.{category}"
configured_logger_names.append(logger_name)
subprocess_logger = logging.getLogger(logger_name)

# Determine console level (default: ERROR - minimize console spam)
console_level = (cfg.subprocess_console_levels or {}).get(category, logging.ERROR)
# Determine file level (default: DEBUG for comprehensive audit trail)
file_level = (cfg.subprocess_log_levels or {}).get(category, logging.DEBUG)

# Set logger level to minimum needed by active destinations
# When verbose=False, console_level is irrelevant (no console handler exists)
if cfg.verbose:
subprocess_logger.setLevel(min(console_level, file_level))
else:
subprocess_logger.setLevel(file_level)

# Add child-specific console handler (when verbose)
if cfg.verbose:
child_console_handler = logging.StreamHandler(sys.stdout)
child_console_handler.setFormatter(console_formatter)
child_console_handler.setLevel(console_level)
subprocess_logger.addHandler(child_console_handler)

# Always propagate to root for file logging
subprocess_logger.propagate = True

# Add filter to parent console handler to exclude configured children
# (prevents duplication for any children that DO propagate)
if cfg.verbose and configured_logger_names:

class ExcludeConfiguredLoggersFilter(logging.Filter):
def filter(self, record):
# Block messages from configured subprocess loggers
return not any(record.name.startswith(name) for name in configured_logger_names)

finn_console_handler.addFilter(ExcludeConfiguredLoggersFilter())

for transform_step in build_dataflow_steps:
try:
step_name = transform_step.__name__
print("Running step: %s [%d/%d]" % (step_name, step_num, len(build_dataflow_steps)))
# redirect output to logfile
if not cfg.verbose and not cfg.no_stdout_redirect:
sys.stdout = stdout_logger
sys.stderr = stderr_logger
# also log current step name to logfile
print("Running step: %s [%d/%d]" % (step_name, step_num, len(build_dataflow_steps)))
builder_log.info(
"Running step: %s [%d/%d]" % (step_name, step_num, len(build_dataflow_steps))
)
# run the step
step_start = time.time()
model = transform_step(model, cfg)
step_end = time.time()
# restore stdout/stderr
sys.stdout = stdout_orig
sys.stderr = stderr_orig
time_per_step[step_name] = step_end - step_start
chkpt_name = "%s.onnx" % (step_name)
chkpt_name = "%d_%s.onnx" % (step_num, step_name)
if cfg.save_intermediate_models:
intermediate_model_dir = cfg.output_dir + "/intermediate_models"
if not os.path.exists(intermediate_model_dir):
os.makedirs(intermediate_model_dir)
model.save("%s/%s" % (intermediate_model_dir, chkpt_name))

# Save FINNLoop bodies as separate checkpoints for debugging MLO
loop_nodes = model.get_nodes_by_op_type("FINNLoop")
if loop_nodes:
from finn.util.basic import getHWCustomOp
for loop_idx, loop_node in enumerate(loop_nodes):
try:
loop_inst = getHWCustomOp(loop_node, model)
loop_body = loop_inst.get_nodeattr("body")
loop_chkpt_name = "%d_%s_loop_%d_%s.onnx" % (
step_num, step_name, loop_idx, loop_node.name
)
loop_body.save("%s/%s" % (intermediate_model_dir, loop_chkpt_name))
except Exception as e:
builder_log.warning(
f"Could not save FINNLoop body for {loop_node.name}: {e}"
)
step_num += 1
except: # noqa
# restore stdout/stderr
sys.stdout = stdout_orig
sys.stderr = stderr_orig
# print exception info and traceback
extype, value, tb = sys.exc_info()
traceback.print_exc()
# start postmortem debug if configured
if cfg.enable_build_pdb_debug:
pdb.post_mortem(tb)
else:
print("enable_build_pdb_debug not set in build config, exiting...")
print("Build failed")
builder_log.error("enable_build_pdb_debug not set in build config, exiting...")
builder_log.error("Build failed")
return -1

with open(cfg.output_dir + "/time_per_step.json", "w") as f:
json.dump(time_per_step, f, indent=2)
print("Completed successfully")
builder_log.info("Completed successfully")
return 0


Expand Down
21 changes: 15 additions & 6 deletions src/finn/builder/build_dataflow_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -336,14 +336,23 @@ class DataflowBuildConfig:
#: Whether pdb postmortem debugging will be launched when the build fails
enable_build_pdb_debug: Optional[bool] = True

#: When True, all warnings and compiler output will be printed in stdout.
#: Otherwise, these will be suppressed and only appear in the build log.
#: Show subprocess tool output on console. When False, tools are silent.
#: Use subprocess_console_levels to control per-tool verbosity when True.
verbose: Optional[bool] = False

#: When True, stdout/stderr will not be redirected even when verbose=False.
#: Useful for applications using terminal-aware libraries (e.g., Rich, tqdm)
#: that require direct terminal access and break with stream redirection.
no_stdout_redirect: Optional[bool] = False
#: Show build progress messages on console. When False, console is silent.
#: Recommended True for interactive builds, False for library/batch mode.
show_progress: Optional[bool] = True

#: Per-tool console log levels (only when verbose=True, otherwise ignored).
#: Dict of {category: level}, e.g. {"hls": logging.ERROR, "vivado": logging.INFO}.
#: Unconfigured tools default to WARNING. Supports hierarchical: "vivado.stitch_ip".
subprocess_console_levels: Optional[dict] = None

#: Per-tool log file levels (always applies, independent of verbose).
#: Dict of {category: level}, e.g. {"hls": logging.INFO, "vivado": logging.DEBUG}.
#: Unconfigured tools default to DEBUG (comprehensive audit trail).
subprocess_log_levels: Optional[dict] = None

#: If given, only run the steps in the list. If not, run default steps.
#: See `default_build_dataflow_steps` for the default list of steps.
Expand Down
Loading