Skip to content

Commit b8cd10e

Browse files
authored
Merge pull request #2748 from PrincetonUniversity/devel
Devel
2 parents 06f3006 + 589e149 commit b8cd10e

File tree

282 files changed

+36850
-7778
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

282 files changed

+36850
-7778
lines changed

.github/actions/install-pnl/action.yml

Lines changed: 12 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -44,14 +44,22 @@ runs:
4444
- name: Drop pytorch on x86
4545
shell: bash
4646
run: |
47+
echo > env_constraints.txt
4748
if [ $(python -c 'import struct; print(struct.calcsize("P") * 8)') == 32 ]; then
4849
sed -i /torch/d requirements.txt
4950
sed -i /modeci_mdf/d requirements.txt
5051
# pywinpty is a transitive dependency and v1.0+ removed support for x86 wheels
51-
# terminado >= 0.10.0 pulls in pywinpty >= 1.1.0
52+
echo "pywinpty<1" >> env_constraints.txt
53+
# jupyter_server pulls jupyter_server_terminals which depends on pywinpty >= 2.0.3
54+
echo "jupyter_server<2" >> env_constraints.txt
5255
# scipy >=1.9.2 doesn't provide win32 wheel and GA doesn't have working fortran on windows
56+
echo "scipy<1.9.2" >> env_constraints.txt
5357
# scikit-learn >= 1.1.3 doesn't provide win32 wheel
54-
[[ ${{ runner.os }} = Windows* ]] && pip install "pywinpty<1" "terminado<0.10" "scipy<1.9.2" "scikit-learn<1.1.3" "statsmodels<0.13.3" "jupyter-server<2" -c requirements.txt
58+
echo "scikit-learn<1.1.3" >> env_constraints.txt
59+
# contourpy >=1.1.0 doesn't provide win32 wheel
60+
echo "contourpy<1.1.0" >> env_constraints.txt
61+
# pillow >= 10.0.0 doesn't provide win32 wheel
62+
echo "pillow < 10.0.0" >> env_constraints.txt
5563
fi
5664
5765
- name: Install updated package
@@ -66,7 +74,7 @@ runs:
6674
echo "new_package=$NEW_PACKAGE" >> $GITHUB_OUTPUT
6775
# save a list of all installed packages (including pip, wheel; it's never empty)
6876
pip freeze --all > orig
69-
pip install "$(echo $NEW_PACKAGE | sed 's/[-_]/./g' | xargs grep *requirements.txt -h -e | head -n1)"
77+
pip install "$(echo $NEW_PACKAGE | sed 's/[-_]/./g' | xargs grep *requirements.txt -h -e | head -n1)" -c env_constraints.txt -c broken_trans_deps.txt
7078
pip show "$NEW_PACKAGE" | grep 'Version' | tee new_version.deps
7179
# uninstall new packages but skip those from previous steps (pywinpty, terminado on windows x86)
7280
# the 'orig' list is not empty (includes at least pip, wheel)
@@ -78,7 +86,7 @@ runs:
7886
- name: Python dependencies
7987
shell: bash
8088
run: |
81-
pip install -e .[${{ inputs.features }}]
89+
pip install -e .[${{ inputs.features }}] -c env_constraints.txt -c broken_trans_deps.txt
8290
8391
- name: "Cleanup old wheels"
8492
shell: bash

.github/workflows/pnl-ci-docs.yml

Lines changed: 10 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@ jobs:
2424
# The entire matrix is set up and 'base' builds are pruned based
2525
# on event name and final configuration (ubuntu, python3.7).
2626
matrix:
27-
python-version: [3.7, 3.8, 3.9]
27+
python-version: ['3.7', '3.8', '3.9', '3.10', '3.11']
2828
python-architecture: ['x64']
2929
os: [ubuntu-latest, macos-latest, windows-latest]
3030
event:
@@ -37,9 +37,13 @@ jobs:
3737
pnl-version: 'base'
3838
- os: windows-latest
3939
pnl-version: 'base'
40-
- python-version: 3.8
40+
- python-version: '3.8'
4141
pnl-version: 'base'
42-
- python-version: 3.9
42+
- python-version: '3.9'
43+
pnl-version: 'base'
44+
- python-version: '3.10'
45+
pnl-version: 'base'
46+
- python-version: '3.11'
4347
pnl-version: 'base'
4448

4549
outputs:
@@ -74,7 +78,9 @@ jobs:
7478
- name: Set up Python ${{ matrix.python-version }}
7579
uses: actions/setup-python@v4
7680
with:
77-
python-version: ${{ matrix.python-version }}
81+
# Block python3.7.17 on macos. see:
82+
# https://github.com/actions/setup-python/issues/682
83+
python-version: ${{ (matrix.os == 'macos-latest' && matrix.python-version == '3.7') && '3.7.16' || matrix.python-version }}
7884
architecture: ${{ matrix.python-architecture }}
7985

8086
- name: Get pip cache location

.github/workflows/pnl-ci.yml

Lines changed: 46 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -48,22 +48,51 @@ jobs:
4848
strategy:
4949
fail-fast: false
5050
matrix:
51-
python-version: [3.7, 3.8, 3.9]
51+
python-version: ['3.7', '3.11']
5252
python-architecture: ['x64']
5353
extra-args: ['']
5454
os: [ubuntu, macos, windows]
5555
include:
56+
# code-coverage build on macos python 3.9
57+
- python-version: '3.9'
58+
os: macos
59+
extra-args: '--cov=psyneulink'
60+
61+
# --forked run of python only tests
62+
# Python tests are enough to test potential naming issues
63+
- python-version: '3.9'
64+
os: ubuntu
65+
extra-args: '--forked -m "not llvm"'
66+
5667
# add 32-bit build on windows
57-
- python-version: 3.8
68+
- python-version: '3.8'
5869
python-architecture: 'x86'
5970
os: windows
60-
# code-coverage build on macos python 3.9
61-
- python-version: 3.9
71+
72+
# fp32 run on linux python 3.10
73+
- python-version: '3.10'
74+
os: ubuntu
75+
extra-args: '--fp-precision=fp32'
76+
77+
# --benchmark-enable run on macos python 3.10
78+
- python-version: '3.10'
6279
os: macos
63-
extra-args: '--cov=psyneulink'
80+
# pytest needs both '--benchmark-only' and '-m benchmark'
81+
# The former fails the test if benchmarks cannot be enabled
82+
# The latter works around a crash in pytest when collecting tests:
83+
# https://github.com/ionelmc/pytest-benchmark/issues/243
84+
extra-args: '-m benchmark --benchmark-enable --benchmark-only --benchmark-min-rounds=2 --benchmark-max-time=0.001 --benchmark-warmup=off -n0 --dist=no'
85+
86+
# add python 3.8 build on macos since 3.7 is broken
87+
# https://github.com/actions/virtual-environments/issues/4230
88+
- python-version: '3.8'
89+
python-architecture: 'x64'
90+
os: macos
91+
6492
exclude:
65-
# 3.7 is broken on macos-11, https://github.com/actions/virtual-environments/issues/4230
66-
- python-version: 3.7
93+
# 3.7 is broken on macos-11,
94+
# https://github.com/actions/virtual-environments/issues/4230
95+
- python-version: '3.7'
6796
os: macos
6897

6998
steps:
@@ -115,17 +144,25 @@ jobs:
115144
# exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
116145
flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
117146
147+
- name: Print test machine/env info
148+
shell: bash
149+
run: |
150+
python -c "import numpy; numpy.show_config()"
151+
case "$RUNNER_OS" in
152+
Linux*) lscpu;;
153+
esac
154+
118155
- name: Test with pytest
119156
timeout-minutes: 180
120157
run: pytest --junit-xml=tests_out.xml --verbosity=0 -n auto ${{ matrix.extra-args }}
121158

122159
- name: Upload test results
123160
uses: actions/upload-artifact@v3
124161
with:
125-
name: test-results-${{ matrix.os }}-${{ matrix.python-version }}-${{ matrix.python-architecture }}-${{ matrix.extra-args }}
162+
name: test-results-${{ matrix.os }}-${{ matrix.python-version }}-${{ matrix.python-architecture }}
126163
path: tests_out.xml
127164
retention-days: 5
128-
if: success() || failure()
165+
if: (success() || failure()) && ! contains(matrix.extra-args, 'forked')
129166

130167
- name: Upload coveralls code coverage
131168
if: contains(matrix.extra-args, '--cov=psyneulink')

CONVENTIONS.md

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -74,7 +74,11 @@ Extensions of Core objects
7474
- arguments_of_constructors, instance_attributes and instance_methods:
7575
lowercase and underscore separator(s) [constructor_arg, method_arg, object_attribute]
7676
- keywords:
77-
all capitals and underscore separator(s) [KEY_WORD]
77+
- all capitals and underscore separator(s) [KEY_WORD]
78+
- assigned values:
79+
- argument of a method or function: lower case [KEY_WORD = 'argument_value']
80+
- names of Components: upper case [KEY_WORD = 'NAME']
81+
7882
DEPRECATED:
7983
- internal keywords:
8084
prepend kw followed by camelCase [kwKeyword]

Scripts/Debug/Yotam LCA Model LLVM.py

Lines changed: 22 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,16 @@
2222
LCA_BIN_EXECUTE=os.getenv("LCA", "LLVMRun")
2323
RUN_TOTAL=True
2424

25+
def _get_execution_mode(bin_execute):
26+
if bin_execute.lower() == 'llvmrun':
27+
return pnl.ExecutionMode.LLVMRun
28+
elif bin_execute.lower() == 'pytorch':
29+
return pnl.ExecutionMode.PyTorch
30+
elif bin_execute.lower() == 'python':
31+
return pnl.ExecutionMode.Python
32+
33+
assert False, "Unknown execution mode: {}".format(bin_execute)
34+
2535
# read in bipartite graph, return graph object, number of possible tasks, number of
2636
# input dimensions and number of output dimensions.
2737
# file format Ni No (input dimension number, output dimension number)
@@ -171,12 +181,12 @@ def get_trained_network(bipartite_graph, num_features=3, num_hidden=200, epochs=
171181
mnet.learn(
172182
inputs=input_set,
173183
minibatch_size=1,
174-
bin_execute=MNET_BIN_EXECUTE,
184+
execution_mode=_get_execution_mode(MNET_BIN_EXECUTE),
175185
patience=patience,
176186
min_delta=min_delt,
177187
)
178188
t2 = time.time()
179-
print("training 1:", MNET_BIN_EXECUTE, t2-t1)
189+
print("training 1 time:", MNET_BIN_EXECUTE, t2 - t1)
180190

181191
# Apply LCA transform (values from Sebastian's code -- supposedly taken from the original LCA paper from Marius & Jay)
182192
if attach_LCA:
@@ -293,12 +303,12 @@ def get_trained_network_multLCA(bipartite_graph, num_features=3, num_hidden=200,
293303
mnet.learn(
294304
inputs=input_set,
295305
minibatch_size=input_set['epochs'],
296-
bin_execute=MNET_BIN_EXECUTE,
306+
execution_mode=_get_execution_mode(MNET_BIN_EXECUTE),
297307
patience=patience,
298308
min_delta=min_delt,
299309
)
300310
t2 = time.time()
301-
print("training 2:", MNET_BIN_EXECUTE, t2-t1)
311+
print("training 2 time:", MNET_BIN_EXECUTE, t2 - t1)
302312

303313
for projection in mnet.projections:
304314
if hasattr(projection.parameters, 'matrix'):
@@ -487,9 +497,10 @@ def evaluate_net_perf_lca(mnet_lca, test_tasks, all_tasks, num_features, num_inp
487497
}
488498
print('running LCA total')
489499
t1 = time.time()
490-
mnet_lca.run( { mnet_lca.nodes['mnet'] : inputs_total }, bin_execute=LCA_BIN_EXECUTE)
500+
mnet_lca.run({mnet_lca.nodes['mnet']: inputs_total},
501+
execution_mode=_get_execution_mode(LCA_BIN_EXECUTE))
491502
t2 = time.time()
492-
print("LCA total:", LCA_BIN_EXECUTE, t2 - t1)
503+
print("LCA run total time:", LCA_BIN_EXECUTE, t2 - t1)
493504
# Run the outer composition, one point at a time (for debugging purposes)
494505
for i in range(num_test_points):
495506
if RUN_TOTAL:
@@ -504,9 +515,10 @@ def evaluate_net_perf_lca(mnet_lca, test_tasks, all_tasks, num_features, num_inp
504515

505516
print('running LCA', i)
506517
t1 = time.time()
507-
mnet_lca.run( { mnet_lca.nodes['mnet'] : input_set['inputs'] }, bin_execute=LCA_BIN_EXECUTE )
518+
mnet_lca.run({mnet_lca.nodes['mnet']: input_set['inputs']},
519+
execution_mode=_get_execution_mode(LCA_BIN_EXECUTE))
508520
t2 = time.time()
509-
print("LCA:", LCA_BIN_EXECUTE, t2 - t1)
521+
print("LCA time:", LCA_BIN_EXECUTE, t2 - t1)
510522
iterations = mnet_lca.nodes['lca'].num_executions_before_finished if LCA_BIN_EXECUTE == "Python" else ugly_get_compile_param_value(mnet_lca, 'lca', 'num_executions_before_finished')
511523
print("ITERATIONS:", iterations)
512524
print('input: ', input_test_pts[i, :])
@@ -617,9 +629,9 @@ def evaluate_net_perf_mse(mnet, test_tasks, all_tasks, num_features, num_input_d
617629

618630
print("running mnet2:", MNET2_BIN_EXECUTE)
619631
t1 = time.time()
620-
mnet.run(input_set, bin_execute=MNET2_BIN_EXECUTE)
632+
mnet.run(input_set, execution_mode=_get_execution_mode(MNET2_BIN_EXECUTE))
621633
t2 = time.time()
622-
print("mnet2:", MNET2_BIN_EXECUTE, t2-t1)
634+
print("mnet2 time:", MNET2_BIN_EXECUTE, t2 - t1)
623635

624636
# Retrieve results
625637
output_test_pts = np.array(mnet.parameters.results.get(mnet)[-num_test_points:]).reshape(num_test_points, output_layer_size)

0 commit comments

Comments
 (0)