diff --git a/.github/workflows/format.yml b/.github/workflows/format.yml
index 03fcc666fb..33263866cf 100644
--- a/.github/workflows/format.yml
+++ b/.github/workflows/format.yml
@@ -44,13 +44,23 @@ jobs:
     - name: Fud Formatting check
       uses: psf/black@stable
       with:
-        options: "--line-length 88"
+        options: "--line-length 88 --check"
         src: 'fud'
+    - name: Calyx-Py Formatting check
+      uses: psf/black@stable
+      with:
+        options: "--line-length 88 --check"
+        src: 'calyx-py'
     - name: Systolic Array Formatting check
       uses: psf/black@stable
       with:
-        options: "--line-length 88"
+        options: "--line-length 88 --check"
        src: 'frontends/systolic-lang'
+    - name: Queues Formatting check
+      uses: psf/black@stable
+      with:
+        options: "--line-length 88 --check"
+        src: 'frontends/queues'
     - name: Fud Linting check
       uses: TrueBrain/actions-flake8@master
       with:
diff --git a/calyx-py/calyx/builder.py b/calyx-py/calyx/builder.py
index 6b0cda8fc1..e3cdab3c45 100644
--- a/calyx-py/calyx/builder.py
+++ b/calyx-py/calyx/builder.py
@@ -221,7 +221,9 @@ def case(
         width = self.infer_width(signal)
         ifs = []
         for branch, controllable in cases.items():
-            std_eq = self.eq(width, self.generate_name(f"{signal.name}_eq_{branch}"), signed)
+            std_eq = self.eq(
+                width, self.generate_name(f"{signal.name}_eq_{branch}"), signed
+            )
 
             with self.continuous:
                 std_eq.left = signal
diff --git a/calyx-py/test/case.py b/calyx-py/test/case.py
index 34d313e74e..de4305a318 100644
--- a/calyx-py/test/case.py
+++ b/calyx-py/test/case.py
@@ -1,6 +1,7 @@
 from calyx.builder import Builder, invoke
 
-#Creates a component the has a case statement.
+
+# Creates a component the has a case statement.
 def add_case(prog):
     # Inputs/Outputs
     my_comp = prog.component("my_comp")
@@ -25,4 +26,3 @@ def build():
 
 if __name__ == "__main__":
     build().emit()
-
diff --git a/calyx-py/test/port_attributes.py b/calyx-py/test/port_attributes.py
index 1d4f8177fb..ecc5aa824d 100644
--- a/calyx-py/test/port_attributes.py
+++ b/calyx-py/test/port_attributes.py
@@ -7,8 +7,8 @@ def insert_foo_component(prog):
     foo_inputs = [
         ("in_1", 1),
         ("in_2", 2, ["data"]),
-        ("in_3", 2, ["data", ("write_together", 1)])
-    ]
+        ("in_3", 2, ["data", ("write_together", 1)]),
+    ]
 
     cb.add_comp_ports(comp, foo_inputs, [])
 
@@ -18,6 +18,7 @@ def insert_foo_component(prog):
     comp.output("out_3", 1, ["data", ("done", 1)])
     # ANCHOR_END: port_attributes
 
+
 if __name__ == "__main__":
     prog = cb.Builder()
     insert_foo_component(prog)
diff --git a/frontends/queues/plot.py b/frontends/queues/plot.py
index b3ea212259..44775a2093 100644
--- a/frontends/queues/plot.py
+++ b/frontends/queues/plot.py
@@ -9,21 +9,17 @@ class Logic(Enum):
     RR = 1
     STRICT = 2
 
+
 def append_path_prefix(file):
     path_to_script = os.path.dirname(__file__)
     path_to_file = os.path.join(path_to_script, file)
     return path_to_file
 
+
 def parse(stat, file):
     out = {
-        "binheap" : {
-            "round_robin" : {},
-            "strict" : {}
-        },
-        "specialized" : {
-            "round_robin" : {},
-            "strict" : {}
-        }
+        "binheap": {"round_robin": {}, "strict": {}},
+        "specialized": {"round_robin": {}, "strict": {}},
     }
 
     with open(file) as file:
@@ -31,22 +27,23 @@ def parse(stat, file):
         for file, data in data.items():
             if isinstance(data, dict):
                 data = data[stat]
-            
-            flow_no = file.split('flow')[0][-1]
+
+            flow_no = file.split("flow")[0][-1]
 
             if "round_robin" in file:
                 if "binheap" in file:
-                    out["binheap"]["round_robin"][flow_no] = data 
+                    out["binheap"]["round_robin"][flow_no] = data
                 else:
                     out["specialized"]["round_robin"][flow_no] = data
             if "strict" in file:
                 if "binheap" in file:
-                    out["binheap"]["strict"][flow_no] = data 
+                    out["binheap"]["strict"][flow_no] = data
                 else:
-                    out["specialized"]["strict"][flow_no] = data 
+                    out["specialized"]["strict"][flow_no] = data
 
     return out
 
+
 def draw(data, stat, logic, unit):
     fig, ax = plt.subplots(1, 1)
     fig.set_size_inches(20, 10, forward=True)
@@ -55,43 +52,48 @@ def draw(data, stat, logic, unit):
         ax.set_ylabel(stat, fontsize=20)
     else:
         ax.set_ylabel(f"{stat} ({unit})", fontsize=20)
-    
+
     file = ""
     if logic == Logic.RR:
         specialized = ax.scatter(
-            data["specialized"]["round_robin"].keys(), 
-            data["specialized"]["round_robin"].values(), 
-            c='b')
+            data["specialized"]["round_robin"].keys(),
+            data["specialized"]["round_robin"].values(),
+            c="b",
+        )
         binheap = ax.scatter(
-            data["binheap"]["round_robin"].keys(), 
-            data["binheap"]["round_robin"].values(), 
-            c='g')
+            data["binheap"]["round_robin"].keys(),
+            data["binheap"]["round_robin"].values(),
+            c="g",
+        )
 
-        ax.set_title("Round Robin Queues", fontweight='bold', fontsize=20)
+        ax.set_title("Round Robin Queues", fontweight="bold", fontsize=20)
         file = append_path_prefix(f"{stat}_round_robin")
     elif logic == Logic.STRICT:
         specialized = ax.scatter(
-            data["specialized"]["strict"].keys(), 
-            data["specialized"]["strict"].values(), 
-            c='b')
+            data["specialized"]["strict"].keys(),
+            data["specialized"]["strict"].values(),
+            c="b",
+        )
         binheap = ax.scatter(
-            data["binheap"]["strict"].keys(), 
-            data["binheap"]["strict"].values(), 
-            c='g')
+            data["binheap"]["strict"].keys(), data["binheap"]["strict"].values(), c="g"
+        )
 
-        ax.set_title("Strict Queues", fontweight='bold', fontsize=20)
+        ax.set_title("Strict Queues", fontweight="bold", fontsize=20)
         file = append_path_prefix(f"{stat}_strict")
 
-    plt.legend((specialized, binheap), 
-               ("Specialized (i.e. Cassandra style)", "Binary Heap"), 
-               fontsize=12)
+    plt.legend(
+        (specialized, binheap),
+        ("Specialized (i.e. Cassandra style)", "Binary Heap"),
+        fontsize=12,
+    )
 
     plt.savefig(file)
 
     print(f"Generated {file}.png")
 
+
 # Parse data for round_robin and strict queues
 stat = sys.argv[1]
 data = {}
@@ -107,7 +109,7 @@ def draw(data, stat, logic, unit):
         for logic in data[impl].keys():
             for flow_no in data[impl][logic].keys():
                 cycles = cycle_data[impl][logic][flow_no]
-                slack = slack_data[impl][logic][flow_no] 
+                slack = slack_data[impl][logic][flow_no]
                 data[impl][logic][flow_no] = (1000 * cycles) / (7 - slack)
 else:
     file = sys.argv[2]
@@ -115,5 +117,5 @@ def draw(data, stat, logic, unit):
 
 # Draw results
 unit = "μs" if stat == "total_time" else None
-draw(data, stat, Logic.RR, unit) 
+draw(data, stat, Logic.RR, unit)
 draw(data, stat, Logic.STRICT, unit)
diff --git a/frontends/queues/queues/binheap/binheap.py b/frontends/queues/queues/binheap/binheap.py
index 63938730ca..0d74f5f5e3 100644
--- a/frontends/queues/queues/binheap/binheap.py
+++ b/frontends/queues/queues/binheap/binheap.py
@@ -52,7 +52,7 @@ def insert_binheap(prog, name, queue_size_factor, rnk_w, val_w):
 
     comp = prog.component(name)
 
-    max_queue_size = 2 ** queue_size_factor
+    max_queue_size = 2**queue_size_factor
     addr_size = queue_size_factor
 
     cmd = comp.input("cmd", 1)
diff --git a/frontends/queues/queues/binheap/flow_inference.py b/frontends/queues/queues/binheap/flow_inference.py
index 27744e3381..659e6dd696 100644
--- a/frontends/queues/queues/binheap/flow_inference.py
+++ b/frontends/queues/queues/binheap/flow_inference.py
@@ -11,22 +11,19 @@ def insert_flow_inference(comp, value, flow, boundaries, name):
         guard = comp.and_(1)
 
         with comp.comb_group(f"{name}_bound_check_{b}") as bound_check_b:
-            le.left = value 
+            le.left = value
             le.right = boundaries[b]
             if b > 0:
-                lt.left = boundaries[b-1]
+                lt.left = boundaries[b - 1]
                 lt.right = value
             else:
-                lt.left = 0 
+                lt.left = 0
                 lt.right = 1
             guard.left = le.out
             guard.right = lt.out
 
         set_flow_b = comp.reg_store(flow, b, f"{name}_set_flow_{b}")
-        bound_check = cb.if_with(
-            cb.CellAndGroup(guard, bound_check_b), 
-            set_flow_b
-        )
+        bound_check = cb.if_with(cb.CellAndGroup(guard, bound_check_b), set_flow_b)
 
         bound_checks.append(bound_check)
diff --git a/frontends/queues/queues/binheap/round_robin.py b/frontends/queues/queues/binheap/round_robin.py
index 7e02d1ffd0..312878dce8 100644
--- a/frontends/queues/queues/binheap/round_robin.py
+++ b/frontends/queues/queues/binheap/round_robin.py
@@ -25,12 +25,12 @@ def insert_binheap_rr(prog, name, boundaries, queue_size_factor=FACTOR):
 
     flow_in = comp.reg(bits_needed(n - 1), "flow_in")
     infer_flow_in = insert_flow_inference(
-            comp, value, flow_in, boundaries, "infer_flow_in"
+        comp, value, flow_in, boundaries, "infer_flow_in"
     )
 
     flow_out = comp.reg(bits_needed(n - 1), "flow_out")
     infer_flow_out = insert_flow_inference(
-            comp, ans.out, flow_out, boundaries, "infer_flow_out"
+        comp, ans.out, flow_out, boundaries, "infer_flow_out"
     )
 
     rank_ptrs = [comp.reg(32, f"r_{i}") for i in range(n)]
@@ -39,19 +39,14 @@ def insert_binheap_rr(prog, name, boundaries, queue_size_factor=FACTOR):
     turn = comp.reg(bits_needed(n - 1), "turn")
     turn_neq_flow_out = comp.neq_use(turn.out, flow_out.out)
     turn_incr_mod_n = cb.if_with(
-        comp.eq_use(turn.out, n - 1),
-        comp.reg_store(turn, 0),
-        comp.incr(turn)
-        )
+        comp.eq_use(turn.out, n - 1), comp.reg_store(turn, 0), comp.incr(turn)
+    )
 
     init = comp.reg(1, "init")
     init_eq_0 = comp.eq_use(init.out, 0)
     init_state = cb.if_with(
         init_eq_0,
-        [
-            cb.par(*[ comp.reg_store(rank_ptrs[i], i) for i in range(n) ]),
-            comp.incr(init)
-        ]
+        [cb.par(*[comp.reg_store(rank_ptrs[i], i) for i in range(n)]), comp.incr(init)],
     )
 
     def binheap_invoke(value, rank):
@@ -63,35 +58,27 @@ def binheap_invoke(value, rank):
             ref_ans=ans,
             ref_err=err,
         )
-    binheap_invokes = dict([
-        (i, binheap_invoke(value, rank_ptrs[i].out))
-        for i in range(n)
-    ])
+
+    binheap_invokes = dict(
+        [(i, binheap_invoke(value, rank_ptrs[i].out)) for i in range(n)]
+    )
 
     update_state_pop = [
-            infer_flow_out,
-            cb.while_with(
-                turn_neq_flow_out,
-                [
-                    comp.case(turn.out, rank_ptr_incrs),
-                    turn_incr_mod_n
-                ]
-            ),
-            turn_incr_mod_n
-        ]
+        infer_flow_out,
+        cb.while_with(
+            turn_neq_flow_out, [comp.case(turn.out, rank_ptr_incrs), turn_incr_mod_n]
+        ),
+        turn_incr_mod_n,
+    ]
 
     update_state_push = comp.case(flow_in.out, rank_ptr_incrs)
-    
+
     comp.control += [
         init_state,
         infer_flow_in,
         comp.case(flow_in.out, binheap_invokes),
         cb.if_with(
-            err_eq_0,
-            comp.case(
-                cmd,
-                { 0: update_state_pop, 1: update_state_push }
-            )
-        )
+            err_eq_0, comp.case(cmd, {0: update_state_pop, 1: update_state_push})
+        ),
     ]
 
     return comp
diff --git a/frontends/queues/queues/binheap/strict.py b/frontends/queues/queues/binheap/strict.py
index 0d545ce0ec..1dcc74c5d6 100644
--- a/frontends/queues/queues/binheap/strict.py
+++ b/frontends/queues/queues/binheap/strict.py
@@ -7,12 +7,7 @@
 FACTOR = 4
 
 
-def insert_binheap_strict(
-        prog,
-        name,
-        boundaries,
-        order,
-        queue_size_factor=FACTOR):
+def insert_binheap_strict(prog, name, boundaries, order, queue_size_factor=FACTOR):
     n = len(boundaries)
 
     comp = prog.component(name)
@@ -27,9 +22,7 @@ def insert_binheap_strict(
     err = comp.reg(1, "err", is_ref=True)
 
     flow = comp.reg(bits_needed(n - 1), "flow")
-    infer_flow = insert_flow_inference(
-        comp, value, flow, boundaries, "infer_flow"
-    )
+    infer_flow = insert_flow_inference(comp, value, flow, boundaries, "infer_flow")
 
     def binheap_invoke(value, rank):
@@ -40,12 +33,12 @@ def binheap_invoke(value, rank):
             ref_ans=ans,
             ref_err=err,
         )
-    binheap_invokes = dict([
-        (i, binheap_invoke(value, order.index(i)))
-        for i in range(n)
-    ])
-    comp.control += [ infer_flow, comp.case(flow.out, binheap_invokes) ]
+
+    binheap_invokes = dict(
+        [(i, binheap_invoke(value, order.index(i))) for i in range(n)]
+    )
+
+    comp.control += [infer_flow, comp.case(flow.out, binheap_invokes)]
 
     return comp
 
@@ -77,4 +70,3 @@ def generate(prog, numflows):
     pifo = insert_binheap_strict(prog, "pifo", boundaries, order)
 
     return pifo
-
diff --git a/frontends/queues/queues/fifo.py b/frontends/queues/queues/fifo.py
index dab877e0da..a4640fa4b9 100644
--- a/frontends/queues/queues/fifo.py
+++ b/frontends/queues/queues/fifo.py
@@ -23,7 +23,7 @@ def insert_fifo(prog, name, queue_len_factor=QUEUE_LEN_FACTOR, val_width=32):
     # If it is 1, we push `value` to the queue.
     value = fifo.input("value", val_width)  # The value to push to the queue
 
-    max_queue_len = 2 ** queue_len_factor
+    max_queue_len = 2**queue_len_factor
     mem = fifo.seq_mem_d1("mem", val_width, max_queue_len, queue_len_factor)
     write = fifo.reg(queue_len_factor)  # The next address to write to
     read = fifo.reg(queue_len_factor)  # The next address to read from
diff --git a/frontends/queues/queues/strict_or_rr.py b/frontends/queues/queues/strict_or_rr.py
index 1b44909674..0b3ec20069 100644
--- a/frontends/queues/queues/strict_or_rr.py
+++ b/frontends/queues/queues/strict_or_rr.py
@@ -62,7 +62,7 @@ def insert_queue(
     hot = pifo.reg(32, "hot")  # A register that marks the next sub-queue to `pop` from.
     og_hot = pifo.reg(32, "og_hot")
     copy_hot = pifo.reg_store(og_hot, hot.out)  # og_hot := hot.out
-    max_queue_len = 2 ** queue_len_factor
+    max_queue_len = 2**queue_len_factor
 
     # Some equality checks.
     len_eq_0 = pifo.eq_use(length.out, 0)
@@ -189,6 +189,7 @@ def insert_queue(
 
     return pifo
 
+
 def generate(prog, numflows, roundrobin):
     """Top-level function to build the program."""
diff --git a/frontends/queues/test_data_gen/binheap_oracle.py b/frontends/queues/test_data_gen/binheap_oracle.py
index 74320a5f89..8fa6b560e5 100644
--- a/frontends/queues/test_data_gen/binheap_oracle.py
+++ b/frontends/queues/test_data_gen/binheap_oracle.py
@@ -1,8 +1,8 @@
 # For usage, see gen_queue_data_expect.sh
 import sys
-import queues 
-import util 
+
+import queues
+import util
 
 if __name__ == "__main__":
@@ -10,5 +10,7 @@
     keepgoing = "--keepgoing" in sys.argv
     commands, values, ranks, _ = util.parse_json(True)
     binheap = queues.Binheap(len)
-    ans = queues.operate_queue(binheap, max_cmds, commands, values, ranks=ranks, keepgoing=keepgoing)
+    ans = queues.operate_queue(
+        binheap, max_cmds, commands, values, ranks=ranks, keepgoing=keepgoing
+    )
     util.dump_json(commands, values, ans, ranks=ranks)
diff --git a/frontends/queues/test_data_gen/complex_tree_oracle.py b/frontends/queues/test_data_gen/complex_tree_oracle.py
index ae5fb51efe..10dcaeeeba 100644
--- a/frontends/queues/test_data_gen/complex_tree_oracle.py
+++ b/frontends/queues/test_data_gen/complex_tree_oracle.py
@@ -1,8 +1,8 @@
 # For usage, see gen_queue_data_expect.sh
 import sys
-import queues 
-import util 
+
+import queues
+import util
 
 if __name__ == "__main__":
@@ -15,7 +15,7 @@
     subqueues3 = [queues.Fifo(len) for _ in range(3)]
     # a second subqueue copy is required, we cannot pass the same subqueue across more than one function call
-    subqueues3s = [queues.Fifo(len) for _ in range(3)] 
+    subqueues3s = [queues.Fifo(len) for _ in range(3)]
     subqueues2 = [queues.Fifo(len) for _ in range(2)]
 
     pifo = queues.RRQueue(
diff --git a/frontends/queues/test_data_gen/fifo_oracle.py b/frontends/queues/test_data_gen/fifo_oracle.py
index 59618476c2..945a4f72b0 100644
--- a/frontends/queues/test_data_gen/fifo_oracle.py
+++ b/frontends/queues/test_data_gen/fifo_oracle.py
@@ -2,7 +2,7 @@
 import sys
 
 import queues
-import util 
+import util
 
 
 if __name__ == "__main__":
diff --git a/frontends/queues/test_data_gen/nwc_simple_oracle.py b/frontends/queues/test_data_gen/nwc_simple_oracle.py
index 9a2b77af4c..53fb393c6b 100644
--- a/frontends/queues/test_data_gen/nwc_simple_oracle.py
+++ b/frontends/queues/test_data_gen/nwc_simple_oracle.py
@@ -1,8 +1,8 @@
 # For usage, see gen_queue_data_expect.sh
 import sys
-import queues 
-import util 
+
+import queues
+import util
 
 if __name__ == "__main__":
@@ -10,5 +10,7 @@
     max_cmds, len = int(sys.argv[1]), int(sys.argv[2])
     keepgoing = "--keepgoing" in sys.argv
     queue = queues.NWCSimple(len)
-    ans = queues.operate_queue(queue, max_cmds, commands, values, ranks, times=times, keepgoing=keepgoing)
+    ans = queues.operate_queue(
+        queue, max_cmds, commands, values, ranks, times=times, keepgoing=keepgoing
+    )
     util.dump_json(commands, values, ans, ranks, times)
diff --git a/frontends/queues/test_data_gen/pcq_oracle.py b/frontends/queues/test_data_gen/pcq_oracle.py
index 51d4dd31f2..9cc8faf82c 100644
--- a/frontends/queues/test_data_gen/pcq_oracle.py
+++ b/frontends/queues/test_data_gen/pcq_oracle.py
@@ -10,5 +10,7 @@
     max_cmds, len = int(sys.argv[1]), int(sys.argv[2])
     keepgoing = "--keepgoing" in sys.argv
     pcq = queues.PCQ(len)
-    ans = queues.operate_queue(pcq, max_cmds, commands, values, ranks, times=times, keepgoing=keepgoing)
+    ans = queues.operate_queue(
+        pcq, max_cmds, commands, values, ranks, times=times, keepgoing=keepgoing
+    )
     util.dump_json(commands, values, ans, ranks, times)
diff --git a/frontends/queues/test_data_gen/pieo_oracle.py b/frontends/queues/test_data_gen/pieo_oracle.py
index c311b51561..e95441de21 100644
--- a/frontends/queues/test_data_gen/pieo_oracle.py
+++ b/frontends/queues/test_data_gen/pieo_oracle.py
@@ -10,5 +10,7 @@
     max_cmds, len = int(sys.argv[1]), int(sys.argv[2])
     keepgoing = "--keepgoing" in sys.argv
     pieo = queues.Pieo(len)
-    ans = queues.operate_queue(pieo, max_cmds, commands, values, ranks, times=times, keepgoing=keepgoing)
+    ans = queues.operate_queue(
+        pieo, max_cmds, commands, values, ranks, times=times, keepgoing=keepgoing
+    )
     util.dump_json(commands, values, ans, ranks, times)
diff --git a/frontends/queues/test_data_gen/pifo_oracle.py b/frontends/queues/test_data_gen/pifo_oracle.py
index 39ca4e852a..7e48897eaa 100644
--- a/frontends/queues/test_data_gen/pifo_oracle.py
+++ b/frontends/queues/test_data_gen/pifo_oracle.py
@@ -2,7 +2,7 @@
 import sys
 
 import queues
-import util 
+import util
 
 
 if __name__ == "__main__":
diff --git a/frontends/queues/test_data_gen/pifo_tree_oracle.py b/frontends/queues/test_data_gen/pifo_tree_oracle.py
index caac323895..268e98bcc1 100644
--- a/frontends/queues/test_data_gen/pifo_tree_oracle.py
+++ b/frontends/queues/test_data_gen/pifo_tree_oracle.py
@@ -1,7 +1,7 @@
 # For usage, see gen_queue_data_expect.sh
 import sys
 
-import queues 
+import queues
 import util
 
 
diff --git a/frontends/queues/test_data_gen/rr_queue_oracle.py b/frontends/queues/test_data_gen/rr_queue_oracle.py
index 3275019649..5d954130bf 100644
--- a/frontends/queues/test_data_gen/rr_queue_oracle.py
+++ b/frontends/queues/test_data_gen/rr_queue_oracle.py
@@ -1,8 +1,8 @@
 # For usage, see gen_queue_data_expect.sh
 import sys
-import queues 
-import util 
+
+import queues
+import util
 
 if __name__ == "__main__":
@@ -27,7 +27,7 @@
         boundaries = [50, 100, 150, 200, 250, 300, 400]
     else:
         raise ValueError("Unsupported number of flows")
-    
+
     subqueues = [queues.Fifo(len) for _ in range(numflows)]
 
     # Our Round Robin Queue orchestrates n subqueues, in this case provided as
diff --git a/frontends/queues/test_data_gen/util.py b/frontends/queues/test_data_gen/util.py
index 7fefa8fee1..261b7f5fe1 100644
--- a/frontends/queues/test_data_gen/util.py
+++ b/frontends/queues/test_data_gen/util.py
@@ -18,17 +18,19 @@ def parse_json(parse_ranks=False, parse_times=False):
     if parse_times:
         times = data["times"]["data"]
 
-    #Return tuple of data
-    return commands, values, (ranks if parse_ranks else None), (times if parse_times else None)
+    # Return tuple of data
+    return (
+        commands,
+        values,
+        (ranks if parse_ranks else None),
+        (times if parse_times else None),
+    )
 
 
 def dump_json(commands, values, ans_mem, ranks=None, times=None):
     """Prints a JSON representation of the data to stdout."""
-    payload = {
-        "ans_mem": ans_mem,
-        "commands": commands
-    }
+    payload = {"ans_mem": ans_mem, "commands": commands}
 
     if ranks:
         payload["ranks"] = ranks
@@ -37,5 +39,5 @@ def dump_json(commands, values, ans_mem, ranks=None, times=None):
 
     if times:
         payload["times"] = times
-    
+
     print(json.dumps(payload, indent=2))
diff --git a/fud/fud/stages/__init__.py b/fud/fud/stages/__init__.py
index 30c9573b41..22625354f3 100644
--- a/fud/fud/stages/__init__.py
+++ b/fud/fud/stages/__init__.py
@@ -268,7 +268,6 @@ def _define_steps(
 
 
 class ComputationGraph:
-
     """Construct the computation graph for a stage"""
 
     def __init__(
diff --git a/fud/fud/stages/verilator/stage.py b/fud/fud/stages/verilator/stage.py
index f4be6a1f2f..4765e036c3 100644
--- a/fud/fud/stages/verilator/stage.py
+++ b/fud/fud/stages/verilator/stage.py
@@ -120,7 +120,9 @@ def mktmp() -> SourceType.Directory:
             return TmpDir()
 
         # Step 2a: Dynamically retrieve the value of stages.verilog.data
-        @builder.step(description="Dynamically retrieve the value of stages.verilog.data")
+        @builder.step(
+            description="Dynamically retrieve the value of stages.verilog.data"
+        )
         def get_verilog_data() -> SourceType.Path:
             data_path = config.get(["stages", "verilog", "data"])
             path = Path(data_path) if data_path else None
diff --git a/fud/fud/stages/xilinx/execution.py b/fud/fud/stages/xilinx/execution.py
index 86a496e3c9..fe6eccd338 100644
--- a/fud/fud/stages/xilinx/execution.py
+++ b/fud/fud/stages/xilinx/execution.py
@@ -83,11 +83,11 @@ def configure():
 
             # Create the `emconfig.json` file that the simulator loudly (but
             # perhaps unnecessarily?) complains about if it's missing.
-            if emu_mode != 'hw':
+            if emu_mode != "hw":
                 platform = config["stages", "xclbin", "device"]
-                utilpath = os.path.join(vitis_path, 'bin', 'emconfigutil')
+                utilpath = os.path.join(vitis_path, "bin", "emconfigutil")
                 shell(
-                    f'{utilpath} --platform {platform} --od {new_dir.name}',
+                    f"{utilpath} --platform {platform} --od {new_dir.name}",
                     capture_stdout=False,
                     stdout_as_debug=True,
                 )
@@ -113,13 +113,15 @@ def run(xclbin: SourceType.Path) -> SourceType.String:
             envs = {
                 "XRT_INI_PATH": xrt_ini_path,
             }
-            if emu_mode != 'hw':
+            if emu_mode != "hw":
                 # `hw` denotes actual hardware execution. In other modes,
                 # configure emulation.
-                envs.update({
-                    "EMCONFIG_PATH": new_dir.name,
-                    "XCL_EMULATION_MODE": emu_mode,  # hw_emu or hw
-                })
+                envs.update(
+                    {
+                        "EMCONFIG_PATH": new_dir.name,
+                        "XCL_EMULATION_MODE": emu_mode,  # hw_emu or hw
+                    }
+                )
 
             # Invoke xclrun.
             start_time = time.time()
diff --git a/fud/fud/utils.py b/fud/fud/utils.py
index c0b9189f5b..34859b7de0 100644
--- a/fud/fud/utils.py
+++ b/fud/fud/utils.py
@@ -163,7 +163,9 @@ def string_to_bytes(data: str) -> bytes:
     return data.encode("UTF-8")
 
 
-def shell(cmd, stdin=None, stdout_as_debug=False, capture_stdout=True, env=None, cwd=None):
+def shell(
+    cmd, stdin=None, stdout_as_debug=False, capture_stdout=True, env=None, cwd=None
+):
     """Run `cmd` as a shell command.
 
     Return an output stream (or None if stdout is not captured). Raise