
Commit 05a22bd

Merge remote-tracking branch 'upstream/dev' into fix/tests_parallelism_safe
2 parents: ee0943a + 76eede6

9 files changed (+48, -62 lines)


docker/Dockerfile.finn

Lines changed: 1 addition & 0 deletions
@@ -118,6 +118,7 @@ RUN pip install pytest-metadata==1.7.0
 RUN pip install pytest-html==3.0.0
 RUN pip install pytest-html-merger==0.0.8
 RUN pip install pytest-cov==4.1.0
+RUN pip install pyyaml==6.0.1

 # extra dependencies from other FINN deps
 # installed in Docker image to make entrypoint script go faster

docker/jenkins/Jenkinsfile

Lines changed: 11 additions & 15 deletions
@@ -93,7 +93,7 @@ pipeline {
         cleanPreviousBuildFiles(env.FINN_HOST_BUILD_DIR)

         // Pass in the marker to run with pytest and the XML test results filename
-        runDockerPytestWithMarker("fpgadataflow", "${env.TEST_NAME}", "--cov --cov-report=html:coverage_fpgadataflow")
+        runDockerPytestWithMarker("fpgadataflow", "${env.TEST_NAME}", "--cov --cov-report=html:coverage_fpgadataflow -n ${env.NUM_PYTEST_WORKERS} --dist worksteal")

         // Stash the test results file(s)
         stash name: env.TEST_NAME, includes: "${env.TEST_NAME}.xml,${env.TEST_NAME}.html"
@@ -324,21 +324,17 @@ void runDockerPytestWithMarker(String marker, String testResultsFilename, String
   sh """./run-docker.sh python -m pytest -m ${marker} --junitxml=${testResultsFilename}.xml --html=${testResultsFilename}.html --self-contained-html ${additionalOptions}"""
 }

-def findBoardBuildFiles(String searchDir, String dirToFind) {
-  def result = sh(script: "find $searchDir -type d -name \"$dirToFind*\"", returnStdout: true).trim()
-  if (result.empty) {
-    error "Directory containing '$dirToFind' not found."
-  }
-  return result
-}
-
 void findCopyZip(String board, String findDir, String copyDir) {
-  def buildDir = findBoardBuildFiles(findDir, "hw_deployment_${board}")
-  sh "cp -r ${buildDir}/${board} ${copyDir}/"
-  dir(copyDir) {
-    sh "zip -r ${board}.zip ${board}/"
-    sh "mkdir -p ${env.ARTIFACT_DIR}/${copyDir}/"
-    sh "cp ${board}.zip ${env.ARTIFACT_DIR}/${copyDir}/"
+  sh "mkdir -p ${copyDir}"
+  try {
+    sh "cp -r ${findDir}/hw_deployment_*/${board} ${copyDir}/"
+    dir(copyDir) {
+      sh "zip -r ${board}.zip ${board}/"
+      sh "mkdir -p ${env.ARTIFACT_DIR}/${copyDir}/"
+      sh "cp ${board}.zip ${env.ARTIFACT_DIR}/${copyDir}/"
+    }
+  } catch (err) {
+    error "No ${board} hw_deployment_* build artifacts found in ${findDir}"
   }
 }
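
The new -n ${env.NUM_PYTEST_WORKERS} --dist worksteal options come from the pytest-xdist plugin: tests are spread over several worker processes, and the work-stealing scheduler reassigns queued tests from busy workers to idle ones. As a rough sketch only, the same options can be exercised from Python outside Jenkins; the marker matches the pipeline stage, but the worker count and results filename below are illustrative and not taken from this repository.

# Sketch: run the marked test suite in parallel using pytest-xdist's
# work-stealing scheduler (requires pytest and pytest-xdist installed).
import sys
import pytest

if __name__ == "__main__":
    sys.exit(
        pytest.main(
            [
                "-m", "fpgadataflow",      # same marker the Jenkins stage uses
                "-n", "4",                 # illustrative worker count (NUM_PYTEST_WORKERS in CI)
                "--dist", "worksteal",     # move queued tests from busy to idle workers
                "--junitxml=results.xml",  # illustrative results filename
            ]
        )
    )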

notebooks/end2end_example/bnn-pynq/cnv_end2end_example.ipynb

Lines changed: 2 additions & 3 deletions
@@ -484,8 +484,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "from shutil import copy\n",
-    "from distutils.dir_util import copy_tree\n",
+    "from shutil import copy, copytree\n",
     "\n",
     "# create directory for deployment files\n",
     "deployment_dir = make_build_dir(prefix=\"pynq_deployment_\")\n",
@@ -503,7 +502,7 @@
     "\n",
     "# driver.py and python libraries\n",
     "pynq_driver_dir = model.get_metadata_prop(\"pynq_driver_dir\")\n",
-    "copy_tree(pynq_driver_dir, deployment_dir)"
+    "copytree(pynq_driver_dir, deployment_dir, dirs_exist_ok=True)"
    ]
   },
   {

notebooks/end2end_example/bnn-pynq/tfc_end2end_example.ipynb

Lines changed: 2 additions & 3 deletions
@@ -895,8 +895,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "from shutil import copy\n",
-    "from distutils.dir_util import copy_tree\n",
+    "from shutil import copy, copytree\n",
     "\n",
     "# create directory for deployment files\n",
     "deployment_dir = make_build_dir(prefix=\"pynq_deployment_\")\n",
@@ -914,7 +913,7 @@
     "\n",
     "# driver.py and python libraries\n",
     "pynq_driver_dir = model.get_metadata_prop(\"pynq_driver_dir\")\n",
-    "copy_tree(pynq_driver_dir, deployment_dir)"
+    "copytree(pynq_driver_dir, deployment_dir, dirs_exist_ok=True)"
    ]
   },
   {

run-docker.sh

Lines changed: 1 addition & 1 deletion
@@ -88,7 +88,7 @@ SCRIPTPATH=$(dirname "$SCRIPT")
 : ${PLATFORM_REPO_PATHS="/opt/xilinx/platforms"}
 : ${XRT_DEB_VERSION="xrt_202220.2.14.354_22.04-amd64-xrt"}
 : ${FINN_HOST_BUILD_DIR="/tmp/$DOCKER_INST_NAME"}
-: ${FINN_DOCKER_TAG="xilinx/finn:$(git describe --always --tags --dirty).$XRT_DEB_VERSION"}
+: ${FINN_DOCKER_TAG="xilinx/finn:$(OLD_PWD=$(pwd); cd $SCRIPTPATH; git describe --always --tags --dirty; cd $OLD_PWD).$XRT_DEB_VERSION"}
 : ${FINN_DOCKER_PREBUILT="0"}
 : ${FINN_DOCKER_RUN_AS_ROOT="0"}
 : ${FINN_DOCKER_GPU="$(docker info | grep nvidia | wc -m)"}

src/finn/builder/build_dataflow_steps.py

Lines changed: 6 additions & 5 deletions
@@ -33,7 +33,6 @@
 import shutil
 import warnings
 from copy import deepcopy
-from distutils.dir_util import copy_tree
 from functools import partial
 from qonnx.core.modelwrapper import ModelWrapper
 from qonnx.custom_op.registry import getCustomOp
@@ -656,7 +655,9 @@ def step_create_stitched_ip(model: ModelWrapper, cfg: DataflowBuildConfig):
             )
         )
         # TODO copy all ip sources into output dir? as zip?
-        copy_tree(model.get_metadata_prop("vivado_stitch_proj"), stitched_ip_dir)
+        shutil.copytree(
+            model.get_metadata_prop("vivado_stitch_proj"), stitched_ip_dir, dirs_exist_ok=True
+        )
         print("Vivado stitched IP written into " + stitched_ip_dir)
         if VerificationStepType.STITCHED_IP_RTLSIM in cfg._resolve_verification_steps():
             # prepare ip-stitched rtlsim
@@ -761,7 +762,7 @@ def step_make_pynq_driver(model: ModelWrapper, cfg: DataflowBuildConfig):
     if DataflowOutputType.PYNQ_DRIVER in cfg.generate_outputs:
         driver_dir = cfg.output_dir + "/driver"
         model = model.transform(MakePYNQDriver(cfg._resolve_driver_platform()))
-        copy_tree(model.get_metadata_prop("pynq_driver_dir"), driver_dir)
+        shutil.copytree(model.get_metadata_prop("pynq_driver_dir"), driver_dir, dirs_exist_ok=True)
         print("PYNQ Python driver written into " + driver_dir)
     return model

@@ -862,8 +863,8 @@ def step_deployment_package(model: ModelWrapper, cfg: DataflowBuildConfig):
         bitfile_dir = cfg.output_dir + "/bitfile"
         driver_dir = cfg.output_dir + "/driver"
         os.makedirs(deploy_dir, exist_ok=True)
-        copy_tree(bitfile_dir, deploy_dir + "/bitfile")
-        copy_tree(driver_dir, deploy_dir + "/driver")
+        shutil.copytree(bitfile_dir, deploy_dir + "/bitfile", dirs_exist_ok=True)
+        shutil.copytree(driver_dir, deploy_dir + "/driver", dirs_exist_ok=True)
     return model
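
The copy_tree replacements above (here and in the two notebooks) track the removal of distutils from Python 3.12; shutil.copytree with dirs_exist_ok=True (available since Python 3.8) gives the same copy-into-an-existing-directory behaviour. A minimal, self-contained sketch of that pattern follows, using illustrative temporary paths rather than anything from the FINN build flow.

# Sketch: copy a generated driver directory into an output directory that
# may already exist, mirroring the shutil.copytree usage adopted above.
import shutil
from pathlib import Path

src = Path("/tmp/pynq_driver_example")  # illustrative source path
dst = Path("/tmp/deploy/driver")        # illustrative destination path

src.mkdir(parents=True, exist_ok=True)
(src / "driver.py").write_text("# placeholder driver\n")
dst.mkdir(parents=True, exist_ok=True)  # destination already exists

# dirs_exist_ok=True merges into dst instead of raising FileExistsError,
# which is what distutils.dir_util.copy_tree used to do implicitly.
shutil.copytree(src, dst, dirs_exist_ok=True)
print("copied into", dst)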

src/finn/custom_op/fpgadataflow/concat.py

Lines changed: 0 additions & 5 deletions
@@ -29,7 +29,6 @@

 import numpy as np
 from qonnx.core.datatype import DataType
-from qonnx.util.basic import roundup_to_integer_multiple

 from finn.custom_op.fpgadataflow.hwcustomop import HWCustomOp

@@ -134,10 +133,6 @@ def execute_node(self, context, graph):
         result = np.concatenate(inp_values, axis=-1)
         context[node.output[0]] = result

-    def get_instream_width_padded(self, ind=0):
-        in_width = self.get_instream_width(ind)
-        return roundup_to_integer_multiple(in_width, 8)
-
     def get_verilog_top_module_intf_names(self):
         intf_names = super().get_verilog_top_module_intf_names()
         n_inputs = self.get_n_inputs()

src/finn/custom_op/fpgadataflow/hls/lookup_hls.py

Lines changed: 13 additions & 26 deletions
@@ -28,6 +28,7 @@

 import numpy as np
 import os
+import warnings
 from math import ceil, log2
 from qonnx.core.datatype import DataType

@@ -87,31 +88,6 @@ def defines(self, var):
         my_defines.append("#define EmbeddingType %s" % emb_hls_type)
         self.code_gen_dict["$DEFINES$"] = my_defines

-    def read_npy_data(self):
-        code_gen_dir = self.get_nodeattr("code_gen_dir_cppsim")
-        dtype = self.get_input_datatype()
-        if dtype == DataType["BIPOLAR"]:
-            # use binary for bipolar storage
-            dtype = DataType["BINARY"]
-        elem_bits = dtype.bitwidth()
-        packed_bits = self.get_instream_width()
-        packed_hls_type = "ap_uint<%d>" % packed_bits
-        elem_hls_type = dtype.get_hls_datatype_str()
-        npy_type = "int64_t"
-        npy_in = "%s/input_0.npy" % code_gen_dir
-        self.code_gen_dict["$READNPYDATA$"] = []
-        self.code_gen_dict["$READNPYDATA$"].append(
-            'npy2apintstream<%s, %s, %d, %s>("%s", in0_%s);'
-            % (
-                packed_hls_type,
-                elem_hls_type,
-                elem_bits,
-                npy_type,
-                npy_in,
-                self.hls_sname(),
-            )
-        )
-
     def dataoutstrm(self):
         code_gen_dir = self.get_nodeattr("code_gen_dir_cppsim")
         dtype = self.get_output_datatype()
@@ -273,7 +249,18 @@ def execute_node(self, context, graph):
         )

         inp = context[node.input[0]]
-        assert inp.dtype == np.int64, "Inputs must be contained in int64 ndarray"
+
+        # Make sure the input has the right container datatype
+        if inp.dtype is not np.float32:
+            # Issue a warning to make the user aware of this type-cast
+            warnings.warn(
+                f"{node.name}: Changing input container datatype from "
+                f"{inp.dtype} to {np.float32}"
+            )
+            # Convert the input to floating point representation as the
+            # container datatype
+            inp = inp.astype(np.float32)
+
         assert inp.shape == exp_ishape, """Input shape doesn't match expected shape."""
         export_idt = self.get_input_datatype()
         odt = self.get_output_datatype()

src/finn/custom_op/fpgadataflow/rtl/streamingfifo_rtl.py

Lines changed: 12 additions & 4 deletions
@@ -133,10 +133,18 @@ def execute_node(self, context, graph):
         elif mode == "rtlsim":
             code_gen_dir = self.get_nodeattr("code_gen_dir_ipgen")
             # create a npy file for the input of the node
-            assert (
-                str(inp.dtype) == "float32"
-            ), """Input datatype is
-            not float32 as expected."""
+
+            # Make sure the input has the right container datatype
+            if inp.dtype is not np.float32:
+                # Issue a warning to make the user aware of this type-cast
+                warnings.warn(
+                    f"{node.name}: Changing input container datatype from "
+                    f"{inp.dtype} to {np.float32}"
+                )
+                # Convert the input to floating point representation as the
+                # container datatype
+                inp = inp.astype(np.float32)
+
             expected_inp_shape = self.get_folded_input_shape()
             reshaped_input = inp.reshape(expected_inp_shape)
             if DataType[self.get_nodeattr("dataType")] == DataType["BIPOLAR"]:
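
Both lookup_hls.py and streamingfifo_rtl.py now coerce the execution input to a float32 container array (with a warning) instead of asserting on its dtype. A standalone sketch of that coercion idea follows; it is not the FINN code itself, the node name and input values are illustrative, and the dtype check is written with != rather than the `is not` comparison used in the diff.

# Sketch: warn and cast an execution-context array to the float32
# container datatype, in the spirit of the execute_node changes above.
import warnings
import numpy as np

def to_float32_container(name: str, inp: np.ndarray) -> np.ndarray:
    # Compare dtypes with != (NumPy treats np.float32 and dtype('float32') as equal)
    if inp.dtype != np.float32:
        warnings.warn(
            f"{name}: Changing input container datatype from "
            f"{inp.dtype} to {np.dtype(np.float32)}"
        )
        inp = inp.astype(np.float32)
    return inp

# Illustrative usage with an int64 input array
arr = to_float32_container("Lookup_0", np.arange(8, dtype=np.int64))
print(arr.dtype)  # float32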
