Skip to content

Commit c1a610c

Browse files
janosh and JaGeo authored
Officially support Python 3.12 and test in CI (#3685)
* add python 3.12 to officially supported versions and test it in CI * down pin chgnet>=0.3.0 * fix weird typo nrafo_ew_tstructs * don't depend on tblite above 3.11 since unsupported tblite/tblite#175 * improve TestVasprun.test_standard * drop Lobsterin inerheritance from UserDict, use simple dict instead and modify __getitem__ to get the salient __getitem__ behavior from UserDict * try DotDict as super class for Lobsterin * override Lobsterin.__contains__ to fix on py312 --------- Co-authored-by: JaGeo <[email protected]>
1 parent 5c8b51c commit c1a610c

File tree

14 files changed

+203
-188
lines changed

14 files changed

+203
-188
lines changed

.github/workflows/release.yml

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@ jobs:
2929
- uses: actions/setup-python@v5
3030
name: Install Python
3131
with:
32-
python-version: "3.11"
32+
python-version: "3.12"
3333

3434
- name: Build sdist
3535
run: |
@@ -45,7 +45,7 @@ jobs:
4545
strategy:
4646
matrix:
4747
os: [ubuntu-latest, macos-14, windows-latest]
48-
python-version: ["39", "310", "311"]
48+
python-version: ["39", "310", "311", "312"]
4949
runs-on: ${{ matrix.os }}
5050
steps:
5151
- name: Check out repo
@@ -68,10 +68,10 @@ jobs:
6868
# For pypi trusted publishing
6969
id-token: write
7070
steps:
71-
- name: Set up Python 3.11
71+
- name: Set up Python
7272
uses: actions/setup-python@v5
7373
with:
74-
python-version: 3.11
74+
python-version: "3.12"
7575

7676
- name: Get build artifacts
7777
uses: actions/download-artifact@v3

.github/workflows/test.yml

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -27,16 +27,16 @@ jobs:
2727
matrix:
2828
# pytest-split automatically distributes work load so parallel jobs finish in similar time
2929
os: [ubuntu-latest, windows-latest]
30-
python-version: ["3.9", "3.11"]
30+
python-version: ["3.9", "3.12"]
3131
split: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
3232
# include/exclude is meant to maximize CI coverage of different platforms and python
3333
# versions while minimizing the total number of jobs. We run all pytest splits with the
3434
# oldest supported python version (currently 3.9) on windows (seems most likely to surface
35-
# errors) and with newest version (currently 3.11) on ubuntu (to get complete and speedy
35+
# errors) and with newest version (currently 3.12) on ubuntu (to get complete and speedy
3636
# coverage on unix). We ignore mac-os, which is assumed to be similar to ubuntu.
3737
exclude:
3838
- os: windows-latest
39-
python-version: "3.11"
39+
python-version: "3.12"
4040
- os: ubuntu-latest
4141
python-version: "3.9"
4242

dev_scripts/chemenv/get_plane_permutations_optimized.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -209,7 +209,7 @@ def random_permutations_iterator(initial_permutation, n_permutations):
209209
# Definition of the facets
210210
all_planes_point_indices = [algo.plane_points]
211211
if algo.other_plane_points is not None:
212-
all_planes_point_indices.extend(algo.other_plane_points)
212+
all_planes_point_indices += algo.other_plane_points
213213

214214
# Loop on the facets
215215
explicit_permutations_per_plane = []
@@ -305,7 +305,7 @@ def random_permutations_iterator(initial_permutation, n_permutations):
305305
# Definition of the facets
306306
all_planes_point_indices = [algo.plane_points]
307307
if algo.other_plane_points is not None:
308-
all_planes_point_indices.extend(algo.other_plane_points)
308+
all_planes_point_indices += algo.other_plane_points
309309

310310
# Setup of the permutations to be used for this algorithm
311311

dev_scripts/chemenv/plane_multiplicity.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -12,11 +12,11 @@
1212
__date__ = "Feb 20, 2016"
1313

1414
if __name__ == "__main__":
15-
allcg = AllCoordinationGeometries()
15+
all_cg = AllCoordinationGeometries()
1616

1717
cg_symbol = "I:12"
1818
all_plane_points = []
19-
cg = allcg[cg_symbol]
19+
cg = all_cg[cg_symbol]
2020

2121
# I:12
2222
if cg_symbol == "I:12":
@@ -25,7 +25,7 @@
2525
for edge in edges:
2626
opposite_edge = [opposite_points[edge[0]], opposite_points[edge[1]]]
2727
equiv_plane = list(edge)
28-
equiv_plane.extend(opposite_edge)
28+
equiv_plane += opposite_edge
2929
equiv_plane.sort()
3030
all_plane_points.append(tuple(equiv_plane))
3131
all_plane_points = [tuple(equiv_plane) for equiv_plane in set(all_plane_points)]

pymatgen/alchemy/materials.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -228,7 +228,7 @@ def __str__(self) -> str:
228228
for hist in self.history:
229229
hist.pop("input_structure", None)
230230
output.append(str(hist))
231-
output.extend(("\nOther parameters", "------------", str(self.other_parameters)))
231+
output += ("\nOther parameters", "------------", str(self.other_parameters))
232232
return "\n".join(output)
233233

234234
def set_parameter(self, key: str, value: Any) -> TransformedStructure:

pymatgen/alchemy/transmuters.py

Lines changed: 22 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,8 @@
2222

2323
from typing_extensions import Self
2424

25+
from pymatgen.alchemy.filters import AbstractStructureFilter
26+
2527
__author__ = "Shyue Ping Ong, Will Richards"
2628
__copyright__ = "Copyright 2012, The Materials Project"
2729
__version__ = "0.1"
@@ -40,7 +42,7 @@ class StandardTransmuter:
4042

4143
def __init__(
4244
self,
43-
transformed_structures,
45+
transformed_structures: list[TransformedStructure],
4446
transformations=None,
4547
extend_collection: int = 0,
4648
ncores: int | None = None,
@@ -130,8 +132,8 @@ def append_transformation(self, transformation, extend_collection=False, clear_r
130132
for x in self.transformed_structures:
131133
new = x.append_transformation(transformation, extend_collection, clear_redo=clear_redo)
132134
if new is not None:
133-
new_structures.extend(new)
134-
self.transformed_structures.extend(new_structures)
135+
new_structures += new
136+
self.transformed_structures += new_structures
135137

136138
def extend_transformations(self, transformations):
137139
"""Extend a sequence of transformations to the TransformedStructure.
@@ -142,18 +144,16 @@ def extend_transformations(self, transformations):
142144
for trafo in transformations:
143145
self.append_transformation(trafo)
144146

145-
def apply_filter(self, structure_filter):
147+
def apply_filter(self, structure_filter: AbstractStructureFilter):
146148
"""Apply a structure_filter to the list of TransformedStructures
147149
in the transmuter.
148150
149151
Args:
150152
structure_filter: StructureFilter to apply.
151153
"""
152-
153-
def test_transformed_structure(ts):
154-
return structure_filter.test(ts.final_structure)
155-
156-
self.transformed_structures = list(filter(test_transformed_structure, self.transformed_structures))
154+
self.transformed_structures = list(
155+
filter(lambda ts: structure_filter.test(ts.final_structure), self.transformed_structures)
156+
)
157157
for ts in self.transformed_structures:
158158
ts.append_filter(structure_filter)
159159

@@ -174,8 +174,8 @@ def set_parameter(self, key, value):
174174
key: The key for the parameter.
175175
value: The value for the parameter.
176176
"""
177-
for x in self.transformed_structures:
178-
x.other_parameters[key] = value
177+
for struct in self.transformed_structures:
178+
struct.other_parameters[key] = value
179179

180180
def add_tags(self, tags):
181181
"""Add tags for the structures generated by the transmuter.
@@ -196,11 +196,11 @@ def append_transformed_structures(self, trafo_structs_or_transmuter):
196196
transmuter.
197197
"""
198198
if isinstance(trafo_structs_or_transmuter, self.__class__):
199-
self.transformed_structures.extend(trafo_structs_or_transmuter.transformed_structures)
199+
self.transformed_structures += trafo_structs_or_transmuter.transformed_structures
200200
else:
201201
for ts in trafo_structs_or_transmuter:
202202
assert isinstance(ts, TransformedStructure)
203-
self.transformed_structures.extend(trafo_structs_or_transmuter)
203+
self.transformed_structures += trafo_structs_or_transmuter
204204

205205
@classmethod
206206
def from_structures(cls, structures, transformations=None, extend_collection=0) -> Self:
@@ -219,8 +219,8 @@ def from_structures(cls, structures, transformations=None, extend_collection=0)
219219
Returns:
220220
StandardTransmuter
221221
"""
222-
trafo_struct = [TransformedStructure(s, []) for s in structures]
223-
return cls(trafo_struct, transformations, extend_collection)
222+
t_struct = [TransformedStructure(s, []) for s in structures]
223+
return cls(t_struct, transformations, extend_collection)
224224

225225

226226
class CifTransmuter(StandardTransmuter):
@@ -253,8 +253,8 @@ def __init__(self, cif_string, transformations=None, primitive=True, extend_coll
253253
if read_data:
254254
structure_data[-1].append(line)
255255
for data in structure_data:
256-
trafo_struct = TransformedStructure.from_cif_str("\n".join(data), [], primitive)
257-
transformed_structures.append(trafo_struct)
256+
t_struct = TransformedStructure.from_cif_str("\n".join(data), [], primitive)
257+
transformed_structures.append(t_struct)
258258
super().__init__(transformed_structures, transformations, extend_collection)
259259

260260
@classmethod
@@ -293,8 +293,8 @@ def __init__(self, poscar_string, transformations=None, extend_collection=False)
293293
extend_collection: Whether to use more than one output structure
294294
from one-to-many transformations.
295295
"""
296-
trafo_struct = TransformedStructure.from_poscar_str(poscar_string, [])
297-
super().__init__([trafo_struct], transformations, extend_collection=extend_collection)
296+
t_struct = TransformedStructure.from_poscar_str(poscar_string, [])
297+
super().__init__([t_struct], transformations, extend_collection=extend_collection)
298298

299299
@classmethod
300300
def from_filenames(cls, poscar_filenames, transformations=None, extend_collection=False) -> StandardTransmuter:
@@ -373,7 +373,7 @@ def _apply_transformation(inputs):
373373
"""
374374
ts, transformation, extend_collection, clear_redo = inputs
375375
new = ts.append_transformation(transformation, extend_collection, clear_redo=clear_redo)
376-
o = [ts]
376+
out = [ts]
377377
if new:
378-
o.extend(new)
379-
return o
378+
out += new
379+
return out

pymatgen/analysis/structure_prediction/substitutor.py

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -119,10 +119,10 @@ def pred_from_structures(
119119
raise ValueError("the species in target_species are not allowed for the probability model you are using")
120120

121121
for permutation in itertools.permutations(target_species):
122-
for s in structures_list:
122+
for dct in structures_list:
123123
# check if: species are in the domain,
124124
# and the probability of subst. is above the threshold
125-
els = s["structure"].elements
125+
els = dct["structure"].elements
126126
if (
127127
len(els) == len(permutation)
128128
and len(set(els) & set(self.get_allowed_species())) == len(els)
@@ -135,18 +135,18 @@ def pred_from_structures(
135135

136136
transf = SubstitutionTransformation(clean_subst)
137137

138-
if Substitutor._is_charge_balanced(transf.apply_transformation(s["structure"])):
139-
ts = TransformedStructure(
140-
s["structure"],
138+
if Substitutor._is_charge_balanced(transf.apply_transformation(dct["structure"])):
139+
t_struct = TransformedStructure(
140+
dct["structure"],
141141
[transf],
142-
history=[{"source": s["id"]}],
142+
history=[{"source": dct["id"]}],
143143
other_parameters={
144144
"type": "structure_prediction",
145145
"proba": self._sp.cond_prob_list(permutation, els),
146146
},
147147
)
148-
result.append(ts)
149-
transmuter.append_transformed_structures([ts])
148+
result.append(t_struct)
149+
transmuter.append_transformed_structures([t_struct])
150150

151151
if remove_duplicates:
152152
transmuter.apply_filter(RemoveDuplicatesFilter(symprec=self._symprec))

pymatgen/io/lobster/inputs.py

Lines changed: 40 additions & 29 deletions
Original file line numberDiff line numberDiff line change
@@ -132,7 +132,7 @@ def __init__(self, settingsdict: dict):
132132
raise KeyError("There are duplicates for the keywords!")
133133
self.update(settingsdict)
134134

135-
def __setitem__(self, key, val):
135+
def __setitem__(self, key, val) -> None:
136136
"""
137137
Add parameter-val pair to Lobsterin. Warns if parameter is not in list of
138138
valid lobsterin tags. Also cleans the parameter and val by stripping
@@ -146,14 +146,25 @@ def __setitem__(self, key, val):
146146

147147
super().__setitem__(new_key, val.strip() if isinstance(val, str) else val)
148148

149-
def __getitem__(self, item):
149+
def __getitem__(self, key) -> Any:
150150
"""Implements getitem from dict to avoid problems with cases."""
151-
new_item = next((key_here for key_here in self if item.strip().lower() == key_here.lower()), item)
151+
normalized_key = next((k for k in self if key.strip().lower() == k.lower()), key)
152152

153-
if new_item.lower() not in [element.lower() for element in Lobsterin.AVAILABLE_KEYWORDS]:
154-
raise KeyError("Key is currently not available")
153+
key_is_unknown = normalized_key.lower() not in map(str.lower, Lobsterin.AVAILABLE_KEYWORDS)
154+
if key_is_unknown or normalized_key not in self.data:
155+
raise KeyError(f"{key=} is not available")
156+
157+
return self.data[normalized_key]
158+
159+
def __contains__(self, key) -> bool:
160+
"""Implements getitem from dict to avoid problems with cases."""
161+
normalized_key = next((k for k in self if key.strip().lower() == k.lower()), key)
162+
163+
key_is_unknown = normalized_key.lower() not in map(str.lower, Lobsterin.AVAILABLE_KEYWORDS)
164+
if key_is_unknown or normalized_key not in self.data:
165+
return False
155166

156-
return super().__getitem__(new_item)
167+
return True
157168

158169
def __delitem__(self, key):
159170
new_key = next((key_here for key_here in self if key.strip().lower() == key_here.lower()), key)
@@ -564,30 +575,30 @@ def from_file(cls, lobsterin: str) -> Self:
564575
lobsterin_dict: dict[str, Any] = {}
565576

566577
for datum in data:
567-
# Remove all comments
568-
if not datum.startswith(("!", "#", "//")):
569-
pattern = r"\b[^!#//]+" # exclude comments after commands
570-
if matched_pattern := re.findall(pattern, datum):
571-
raw_datum = matched_pattern[0].replace("\t", " ") # handle tab in between and end of command
572-
key_word = raw_datum.strip().split(" ") # extract keyword
573-
if len(key_word) > 1:
574-
# check which type of keyword this is, handle accordingly
575-
if key_word[0].lower() not in [datum2.lower() for datum2 in Lobsterin.LISTKEYWORDS]:
576-
if key_word[0].lower() not in [datum2.lower() for datum2 in Lobsterin.FLOAT_KEYWORDS]:
577-
if key_word[0].lower() not in lobsterin_dict:
578-
lobsterin_dict[key_word[0].lower()] = " ".join(key_word[1:])
579-
else:
580-
raise ValueError(f"Same keyword {key_word[0].lower()} twice!")
581-
elif key_word[0].lower() not in lobsterin_dict:
582-
lobsterin_dict[key_word[0].lower()] = float(key_word[1])
583-
else:
584-
raise ValueError(f"Same keyword {key_word[0].lower()} twice!")
585-
elif key_word[0].lower() not in lobsterin_dict:
586-
lobsterin_dict[key_word[0].lower()] = [" ".join(key_word[1:])]
578+
if datum.startswith(("!", "#", "//")):
579+
continue # ignore comments
580+
pattern = r"\b[^!#//]+" # exclude comments after commands
581+
if matched_pattern := re.findall(pattern, datum):
582+
raw_datum = matched_pattern[0].replace("\t", " ") # handle tab in between and end of command
583+
key_word = raw_datum.strip().split(" ") # extract keyword
584+
key = key_word[0].lower()
585+
if len(key_word) > 1:
586+
# check which type of keyword this is, handle accordingly
587+
if key not in [datum2.lower() for datum2 in Lobsterin.LISTKEYWORDS]:
588+
if key not in [datum2.lower() for datum2 in Lobsterin.FLOAT_KEYWORDS]:
589+
if key in lobsterin_dict:
590+
raise ValueError(f"Same keyword {key} twice!")
591+
lobsterin_dict[key] = " ".join(key_word[1:])
592+
elif key in lobsterin_dict:
593+
raise ValueError(f"Same keyword {key} twice!")
587594
else:
588-
lobsterin_dict[key_word[0].lower()].append(" ".join(key_word[1:]))
589-
elif len(key_word) > 0:
590-
lobsterin_dict[key_word[0].lower()] = True
595+
lobsterin_dict[key] = float("nan" if key_word[1].strip() == "None" else key_word[1])
596+
elif key not in lobsterin_dict:
597+
lobsterin_dict[key] = [" ".join(key_word[1:])]
598+
else:
599+
lobsterin_dict[key].append(" ".join(key_word[1:]))
600+
elif len(key_word) > 0:
601+
lobsterin_dict[key] = True
591602

592603
return cls(lobsterin_dict)
593604

pymatgen/io/vasp/outputs.py

Lines changed: 7 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -1471,7 +1471,7 @@ def _parse_calculation(self, elem):
14711471
return istep
14721472

14731473
@staticmethod
1474-
def _parse_dos(elem):
1474+
def _parse_dos(elem) -> tuple[Dos, Dos, list[dict]]:
14751475
efermi = float(elem.find("i").text)
14761476
energies = None
14771477
tdensities = {}
@@ -1491,22 +1491,18 @@ def _parse_dos(elem):
14911491
orbs.pop(0)
14921492
lm = any("x" in s for s in orbs)
14931493
for s in partial.find("array").find("set").findall("set"):
1494-
pdos = defaultdict(dict)
1494+
pdos: dict[Orbital | OrbitalType, dict[Spin, np.ndarray]] = defaultdict(dict)
14951495

14961496
for ss in s.findall("set"):
14971497
spin = Spin.up if ss.attrib["comment"] == "spin 1" else Spin.down
14981498
data = np.array(_parse_vasp_array(ss))
1499-
_nrow, ncol = data.shape
1500-
for j in range(1, ncol):
1501-
orb = Orbital(j - 1) if lm else OrbitalType(j - 1)
1502-
pdos[orb][spin] = data[:, j]
1499+
_n_row, n_col = data.shape
1500+
for col_idx in range(1, n_col):
1501+
orb = Orbital(col_idx - 1) if lm else OrbitalType(col_idx - 1)
1502+
pdos[orb][spin] = data[:, col_idx] # type: ignore[index]
15031503
pdoss.append(pdos)
15041504
elem.clear()
1505-
return (
1506-
Dos(efermi, energies, tdensities),
1507-
Dos(efermi, energies, idensities),
1508-
pdoss,
1509-
)
1505+
return Dos(efermi, energies, tdensities), Dos(efermi, energies, idensities), pdoss
15101506

15111507
@staticmethod
15121508
def _parse_eigen(elem):

0 commit comments

Comments
 (0)