[build-system]
# setuptools>=61.0 is required for PEP 621 [project] metadata support.
requires = ["setuptools>=61.0"]
build-backend = "setuptools.build_meta"
4+
[project]
name = "ParProcCo"
# Version is resolved at build time from ParProcCo.__version__
# (see [tool.setuptools.dynamic]).
dynamic = ["version"]
description = "Parallel Processing Coordinator. Splits dataset processing to run parallel cluster jobs and aggregates outputs"
readme = "README.md"
license = { file = "LICENSE" }
authors = [
    { name = "Peter Chang" },
]
maintainers = [
    { name = "Data Analysis group", email = "dataanalysis@diamond.ac.uk" },
]
classifiers = [
    "License :: OSI Approved :: Apache Software License",
    "Natural Language :: English",
    "Programming Language :: Python :: 3 :: Only",
]
requires-python = ">=3.10"
dependencies = [
    "h5py",
    "pydantic",
    "pyyaml",
    "requests",
]
29+
[project.optional-dependencies]
# Install with `pip install ParProcCo[testing]` / `ParProcCo[dev]`.
testing = [
    "parameterized",
    "pytest",
]
dev = [
    "datamodel-code-generator",
]
38+
[project.urls]
repository = "https://github.com/DiamondLightSource/ParProcCo"
41+
[tool.setuptools.dynamic]
# Package version is read from the ParProcCo.__version__ attribute.
version = { attr = "ParProcCo.__version__" }
44+
[tool.setuptools.packages.find]
include = ["ParProcCo", "ParProcCo.slurm", "ParProcCo.test"]
namespaces = false
48+
[tool.setuptools]
# Plain executable scripts installed as-is (not console_scripts entry points).
script-files = [
    "scripts/nxdata_aggregate",
    "scripts/ppc_cluster_runner",
    "scripts/ppc_cluster_submit",
]

[tool.pytest.ini_options]
log_cli = true

# NOTE(review): this region was mangled by a diff-view copy-paste; the hunk
# header carried the context line `plugins = "numpy.typing.mypy_plugin"`,
# which is a mypy setting — reconstructed under [tool.mypy]. Confirm against
# the repository history.
[tool.mypy]
plugins = "numpy.typing.mypy_plugin"

[tool.ruff]
line-length = 88