
Commit 42d4e8c

Merge pull request #9 from camsys/data-type-dev-merge
Merge Develop Branch
2 parents: ae126f1 + 9cd9bbe (commit 42d4e8c)

239 files changed: +95340 additions, -6453 deletions

.github/workflows/branch-docs.yml

Lines changed: 1 addition & 1 deletion

@@ -40,7 +40,7 @@ jobs:
         id: cache

       - name: Update environment
-        run: mamba env update -n docbuild -f conda-environments/docbuild.yml
+        run: mamba env update --verbose -n docbuild -f conda-environments/docbuild.yml
         if: steps.cache.outputs.cache-hit != 'true'

      - name: Install activitysim

.github/workflows/core_tests.yml

Lines changed: 90 additions & 6 deletions

@@ -45,7 +45,21 @@ jobs:
         id: cache

       - name: Update environment
-        run: mamba env update -n asim-test -f conda-environments/github-actions-tests.yml
+        run: |
+          mamba env update -n asim-test -f conda-environments/github-actions-tests.yml
+          mamba install --yes \
+            "psutil=5.9.5" \
+            "pydantic=1.10.13" \
+            "pypyr=5.8.0" \
+            "pytables=3.6.1" \
+            "pytest-cov" \
+            "pytest-regressions=2.5.0" \
+            "scikit-learn=1.2.2" \
+            "sharrow>=2.6.0" \
+            "simwrapper=1.8.5" \
+            "xarray=2023.2.0" \
+            "zarr=2.14.2" \
+            "zstandard=0.21.0"
         if: steps.cache.outputs.cache-hit != 'true'

       - name: Install activitysim
@@ -131,7 +145,21 @@ jobs:
         id: cache

       - name: Update environment
-        run: mamba env update -n asim-test -f conda-environments/github-actions-tests.yml
+        run: |
+          mamba env update -n asim-test -f conda-environments/github-actions-tests.yml
+          mamba install --yes \
+            "psutil=5.9.5" \
+            "pydantic=1.10.13" \
+            "pypyr=5.8.0" \
+            "pytables=3.6.1" \
+            "pytest-cov" \
+            "pytest-regressions=2.5.0" \
+            "scikit-learn=1.2.2" \
+            "sharrow>=2.6.0" \
+            "simwrapper=1.8.5" \
+            "xarray=2023.2.0" \
+            "zarr=2.14.2" \
+            "zstandard=0.21.0"
         if: steps.cache.outputs.cache-hit != 'true'

       - name: Install activitysim
@@ -215,7 +243,21 @@ jobs:
         id: cache

       - name: Update environment
-        run: mamba env update -n asim-test -f conda-environments/github-actions-tests.yml
+        run: |
+          mamba env update -n asim-test -f conda-environments/github-actions-tests.yml
+          mamba install --yes \
+            "psutil=5.9.5" \
+            "pydantic=1.10.13" \
+            "pypyr=5.8.0" \
+            "pytables=3.6.1" \
+            "pytest-cov" \
+            "pytest-regressions=2.5.0" \
+            "scikit-learn=1.2.2" \
+            "sharrow>=2.6.0" \
+            "simwrapper=1.8.5" \
+            "xarray=2023.2.0" \
+            "zarr=2.14.2" \
+            "zstandard=0.21.0"
         if: steps.cache.outputs.cache-hit != 'true'

       - name: Install activitysim
@@ -298,7 +340,21 @@ jobs:
         id: cache

       - name: Update environment
-        run: mamba env update -n asim-test -f conda-environments/github-actions-tests.yml
+        run: |
+          mamba env update -n asim-test -f conda-environments/github-actions-tests.yml
+          mamba install --yes \
+            "psutil=5.9.5" \
+            "pydantic=1.10.13" \
+            "pypyr=5.8.0" \
+            "pytables=3.6.1" \
+            "pytest-cov" \
+            "pytest-regressions=2.5.0" \
+            "scikit-learn=1.2.2" \
+            "sharrow>=2.6.0" \
+            "simwrapper=1.8.5" \
+            "xarray=2023.2.0" \
+            "zarr=2.14.2" \
+            "zstandard=0.21.0"
         if: steps.cache.outputs.cache-hit != 'true'

       - name: Install activitysim
@@ -351,7 +407,21 @@ jobs:
         id: cache

       - name: Update environment
-        run: mamba env update -n asim-test -f conda-environments/github-actions-tests.yml
+        run: |
+          mamba env update -n asim-test -f conda-environments/github-actions-tests.yml
+          mamba install --yes \
+            "psutil=5.9.5" \
+            "pydantic=1.10.13" \
+            "pypyr=5.8.0" \
+            "pytables=3.6.1" \
+            "pytest-cov" \
+            "pytest-regressions=2.5.0" \
+            "scikit-learn=1.2.2" \
+            "sharrow>=2.6.0" \
+            "simwrapper=1.8.5" \
+            "xarray=2023.2.0" \
+            "zarr=2.14.2" \
+            "zstandard=0.21.0"
         if: steps.cache.outputs.cache-hit != 'true'

       - name: Install activitysim
@@ -403,7 +473,21 @@ jobs:
         id: cache

       - name: Update environment
-        run: mamba env update -n asim-test -f conda-environments/github-actions-tests.yml
+        run: |
+          mamba env update -n asim-test -f conda-environments/github-actions-tests.yml
+          mamba install --yes \
+            "psutil=5.9.5" \
+            "pydantic=1.10.13" \
+            "pypyr=5.8.0" \
+            "pytables=3.6.1" \
+            "pytest-cov" \
+            "pytest-regressions=2.5.0" \
+            "scikit-learn=1.2.2" \
+            "sharrow>=2.6.0" \
+            "simwrapper=1.8.5" \
+            "xarray=2023.2.0" \
+            "zarr=2.14.2" \
+            "zstandard=0.21.0"
         if: steps.cache.outputs.cache-hit != 'true'

       - name: Install Larch
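
As a side note (not part of the commit), the exact pins added above can be sanity-checked locally with a short Python snippet that uses only the standard library. The distribution names below are assumptions that mirror the conda package names in the workflow (for example, the conda package pytables installs the "tables" distribution); adjust them if your environment uses different names.

# Quick local check, not part of the commit: compare installed versions against
# the exact pins added in core_tests.yml above. Distribution names are assumed
# to mirror the conda package names; "tables" stands in for conda's "pytables".
from importlib.metadata import PackageNotFoundError, version

PINS = {
    "psutil": "5.9.5",
    "pydantic": "1.10.13",
    "pypyr": "5.8.0",
    "tables": "3.6.1",
    "pytest-regressions": "2.5.0",
    "scikit-learn": "1.2.2",
    "simwrapper": "1.8.5",
    "xarray": "2023.2.0",
    "zarr": "2.14.2",
    "zstandard": "0.21.0",
}

for dist, expected in PINS.items():
    try:
        installed = version(dist)
    except PackageNotFoundError:
        print(f"{dist}: NOT INSTALLED (expected {expected})")
        continue
    status = "ok" if installed == expected else f"MISMATCH (expected {expected})"
    print(f"{dist}: {installed} {status}")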

.gitignore

Lines changed: 0 additions & 1 deletion

@@ -5,7 +5,6 @@ sandbox/
 .pytest_cache
 .vagrant

-
 # Byte-compiled / optimized / DLL files
 __pycache__/
 *.py[cod]

activitysim/abm/misc.py

Lines changed: 9 additions & 8 deletions

@@ -4,6 +4,7 @@

 import logging

+import numpy as np
 import pandas as pd

 from activitysim.core import workflow
@@ -16,16 +17,16 @@


 @workflow.cached_object
-def households_sample_size(state: workflow.State, override_hh_ids):
+def households_sample_size(state: workflow.State, override_hh_ids) -> int:

     if override_hh_ids is None:
-        return state.settings, households_sample_size
+        return state.settings.households_sample_size
     else:
-        return 0 if override_hh_ids is None else len(override_hh_ids)
+        return len(override_hh_ids)


 @workflow.cached_object
-def override_hh_ids(state: workflow.State):
+def override_hh_ids(state: workflow.State) -> np.ndarray | None:

     hh_ids_filename = state.settings.hh_ids
     if hh_ids_filename is None:
@@ -63,12 +64,12 @@ def override_hh_ids(state: workflow.State):


 @workflow.cached_object
-def trace_od(state: workflow.State):
+def trace_od(state: workflow.State) -> tuple[int, int] | None:

     od = state.settings.trace_od

     if od and not (
-        isinstance(od, (list, tuple))
+        isinstance(od, list | tuple)
         and len(od) == 2
         and all(isinstance(x, int) for x in od)
     ):
@@ -81,12 +82,12 @@ def trace_od(state: workflow.State):


 @workflow.cached_object
-def chunk_size(state: workflow.State):
+def chunk_size(state: workflow.State) -> int:
     _chunk_size = int(state.settings.chunk_size or 0)

     return _chunk_size


 @workflow.cached_object
-def check_for_variability(state: workflow.State):
+def check_for_variability(state: workflow.State) -> bool:
     return bool(state.settings.check_for_variability)
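
A hedged aside on the misc.py hunks: the new isinstance(od, list | tuple) form relies on Python 3.10+, where PEP 604 unions are accepted as the second argument to isinstance(). A minimal standalone sketch (the od value is illustrative, not taken from the commit):

# Illustrative only: PEP 604 union syntax in isinstance(), Python 3.10+.
od = (5, 12)  # a hypothetical origin/destination pair

print(isinstance(od, (list, tuple)))  # classic tuple-of-types form -> True
print(isinstance(od, list | tuple))   # union form used in the new code -> True

# The same validity check performed by trace_od in the diff above.
valid = (
    isinstance(od, list | tuple)
    and len(od) == 2
    and all(isinstance(x, int) for x in od)
)
print(valid)  # True for a well-formed two-integer pair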

activitysim/abm/models/__init__.py

Lines changed: 1 addition & 0 deletions

@@ -15,6 +15,7 @@
     initialize,
     initialize_los,
     initialize_tours,
+    input_checker,
     joint_tour_composition,
     joint_tour_destination,
     joint_tour_frequency,

activitysim/abm/models/accessibility.py

Lines changed: 78 additions & 23 deletions

@@ -3,39 +3,83 @@
 from __future__ import annotations

 import logging
+from typing import Any

 import numba as nb
 import numpy as np
 import pandas as pd

 from activitysim.core import assign, chunk, los, workflow
+from activitysim.core.configuration.base import PydanticReadable

 logger = logging.getLogger(__name__)


+class AccessibilitySettings(PydanticReadable):
+    """
+    Settings for aggregate accessibility component.
+    """
+
+    CONSTANTS: dict[str, Any] = {}
+
+    land_use_columns: list[str] = []
+    """Only include the these columns in the computational tables
+
+    Memory usage is reduced by only listing the minimum columns needed by
+    the SPEC, and nothing extra.
+    """
+
+    SPEC: str = "accessibility.csv"
+    """Filename for the accessibility specification (csv) file."""
+
+    explicit_chunk: int = 0
+    """If > 0, use this chunk size instead of adaptive chunking."""
+
+
 @nb.njit
 def _accumulate_accessibility(arr, orig_zone_count, dest_zone_count):
     assert arr.size == orig_zone_count * dest_zone_count
-    arr2 = arr.reshape((orig_zone_count, dest_zone_count))
+    assert arr.ndim == 1
+    i = 0
     result = np.empty((orig_zone_count,), dtype=arr.dtype)
     for o in range(orig_zone_count):
         x = 0
         for d in range(dest_zone_count):
-            x += arr2[o, d]
+            x += arr[i]
+            i += 1
         result[o] = np.log1p(x)
     return result


 def compute_accessibilities_for_zones(
-    state,
-    accessibility_df,
-    land_use_df,
-    assignment_spec,
-    constants,
-    network_los,
-    trace_label,
-    chunk_sizer,
+    state: workflow.State,
+    accessibility_df: pd.DataFrame,
+    land_use_df: pd.DataFrame,
+    assignment_spec: dict,
+    constants: dict,
+    network_los: los.Network_LOS,
+    trace_label: str,
+    chunk_sizer: chunk.ChunkSizer,
 ):
+    """
+    Compute accessibility for each zone in land use file using expressions from accessibility_spec.
+
+    Parameters
+    ----------
+    state : workflow.State
+    accessibility_df : pd.DataFrame
+    land_use_df : pd.DataFrame
+    assignment_spec : dict
+    constants : dict
+    network_los : los.Network_LOS
+    trace_label : str
+    chunk_sizer : chunk.ChunkSizer
+
+    Returns
+    -------
+    accessibility_df : pd.DataFrame
+        The accessibility_df is updated in place.
+    """
     orig_zones = accessibility_df.index.values
     dest_zones = land_use_df.index.values

@@ -144,6 +188,10 @@ def compute_accessibility(
     land_use: pd.DataFrame,
     accessibility: pd.DataFrame,
     network_los: los.Network_LOS,
+    model_settings: AccessibilitySettings | None = None,
+    model_settings_file_name: str = "accessibility.yaml",
+    trace_label: str = "compute_accessibility",
+    output_table_name: str = "accessibility",
 ) -> None:
     """
     Compute accessibility for each zone in land use file using expressions from accessibility_spec
@@ -160,40 +208,47 @@
     product mutes large differences. The decay function on the walk accessibility measure is
     steeper than automobile or transit. The minimum accessibility is zero.
     """
+    if model_settings is None:
+        model_settings = AccessibilitySettings.read_settings_file(
+            state.filesystem, model_settings_file_name
+        )

-    trace_label = "compute_accessibility"
-    model_settings = state.filesystem.read_model_settings("accessibility.yaml")
     assignment_spec = assign.read_assignment_spec(
-        state.filesystem.get_config_file_path("accessibility.csv")
+        state.filesystem.get_config_file_path(model_settings.SPEC)
     )

     accessibility_df = accessibility
     if len(accessibility_df.columns) > 0:
         logger.warning(
-            f"accessibility table is not empty. Columns:{list(accessibility_df.columns)}"
+            f"accessibility table is not empty. "
+            f"Columns:{list(accessibility_df.columns)}"
         )
         raise RuntimeError("accessibility table is not empty.")

-    constants = model_settings.get("CONSTANTS", {})
+    constants = model_settings.CONSTANTS

-    # only include the land_use columns needed by spec, as specified by land_use_columns model_setting
-    land_use_columns = model_settings.get("land_use_columns", [])
+    # only include the land_use columns needed by spec,
+    # as specified by land_use_columns model_setting
+    land_use_columns = model_settings.land_use_columns
     land_use_df = land_use
     land_use_df = land_use_df[land_use_columns]

     logger.info(
-        f"Running {trace_label} with {len(accessibility_df.index)} orig zones {len(land_use_df)} dest zones"
+        f"Running {trace_label} with {len(accessibility_df.index)} orig zones "
+        f"{len(land_use_df)} dest zones"
     )

     accessibilities_list = []
+    explicit_chunk_size = model_settings.explicit_chunk

     for (
-        i,
+        _i,
         chooser_chunk,
-        chunk_trace_label,
+        _chunk_trace_label,
         chunk_sizer,
-    ) in chunk.adaptive_chunked_choosers(state, accessibility_df, trace_label):
-
+    ) in chunk.adaptive_chunked_choosers(
+        state, accessibility_df, trace_label, explicit_chunk_size=explicit_chunk_size
+    ):
         accessibilities = compute_accessibilities_for_zones(
             state,
             chooser_chunk,
@@ -211,4 +266,4 @@ def compute_accessibility(
     logger.info(f"{trace_label} computed accessibilities {accessibility_df.shape}")

     # - write table to pipeline
-    state.add_table("accessibility", accessibility_df)
+    state.add_table(output_table_name, accessibility_df)
