
Commit 4a7e474

Merge branch 'master' into feature/add-artist-to-item-entry-template

2 parents: 8126eaa + bde5de4

40 files changed: +974 -751 lines

.github/workflows/ci.yaml

Lines changed: 1 addition & 1 deletion

@@ -33,7 +33,7 @@ jobs:
       if: matrix.platform == 'ubuntu-latest'
       run: |
         sudo apt update
-        sudo apt install ffmpeg gobject-introspection libcairo2-dev libgirepository-2.0-dev pandoc imagemagick
+        sudo apt install --yes --no-install-recommends ffmpeg gobject-introspection gstreamer1.0-plugins-base python3-gst-1.0 libcairo2-dev libgirepository-2.0-dev pandoc imagemagick

     - name: Get changed lyrics files
       id: lyrics-update

.github/workflows/lint.yml

Lines changed: 0 additions & 1 deletion

@@ -105,7 +105,6 @@ jobs:

     - name: Type check code
       uses: liskin/gh-problem-matcher-wrap@v3
-      continue-on-error: true
       with:
         linters: mypy
         run: poe check-types --show-column-numbers --no-error-summary ${{ needs.changed-files.outputs.changed_python_files }}

beets/autotag/distance.py

Lines changed: 3 additions & 3 deletions

@@ -8,7 +8,7 @@
 from jellyfish import levenshtein_distance
 from unidecode import unidecode

-from beets import config, plugins
+from beets import config, metadata_plugins
 from beets.util import as_string, cached_classproperty, get_most_common_tags

 if TYPE_CHECKING:

@@ -409,7 +409,7 @@ def track_distance(
     dist.add_expr("medium", item.disc != track_info.medium)

     # Plugins.
-    dist.update(plugins.track_distance(item, track_info))
+    dist.update(metadata_plugins.track_distance(item, track_info))

     return dist

@@ -526,6 +526,6 @@ def distance(
     dist.add("unmatched_tracks", 1.0)

     # Plugins.
-    dist.update(plugins.album_distance(items, album_info, mapping))
+    dist.update(metadata_plugins.album_distance(items, album_info, mapping))

     return dist

beets/autotag/match.py

Lines changed: 6 additions & 6 deletions

@@ -24,7 +24,7 @@
 import lap
 import numpy as np

-from beets import config, logging, plugins
+from beets import config, logging, metadata_plugins
 from beets.autotag import AlbumInfo, AlbumMatch, TrackInfo, TrackMatch, hooks
 from beets.util import get_most_common_tags

@@ -119,7 +119,7 @@ def match_by_id(items: Iterable[Item]) -> AlbumInfo | None:
         return None
     # If all album IDs are equal, look up the album.
     log.debug("Searching for discovered album ID: {0}", first)
-    return plugins.album_for_id(first)
+    return metadata_plugins.album_for_id(first)


 def _recommendation(

@@ -274,7 +274,7 @@ def tag_album(
     if search_ids:
         for search_id in search_ids:
             log.debug("Searching for album ID: {0}", search_id)
-            if info := plugins.album_for_id(search_id):
+            if info := metadata_plugins.album_for_id(search_id):
                 _add_candidate(items, candidates, info)

     # Use existing metadata or text search.

@@ -311,7 +311,7 @@ def tag_album(
     log.debug("Album might be VA: {0}", va_likely)

     # Get the results from the data sources.
-    for matched_candidate in plugins.candidates(
+    for matched_candidate in metadata_plugins.candidates(
         items, search_artist, search_album, va_likely
     ):
         _add_candidate(items, candidates, matched_candidate)

@@ -346,7 +346,7 @@ def tag_item(
     if trackids:
         for trackid in trackids:
             log.debug("Searching for track ID: {0}", trackid)
-            if info := plugins.track_for_id(trackid):
+            if info := metadata_plugins.track_for_id(trackid):
                 dist = track_distance(item, info, incl_artist=True)
                 candidates[info.track_id] = hooks.TrackMatch(dist, info)
                 # If this is a good match, then don't keep searching.

@@ -372,7 +372,7 @@ def tag_item(
     log.debug("Item search terms: {0} - {1}", search_artist, search_title)

     # Get and evaluate candidate metadata.
-    for track_info in plugins.item_candidates(
+    for track_info in metadata_plugins.item_candidates(
         item, search_artist, search_title
     ):
         dist = track_distance(item, track_info, incl_artist=True)
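
Every hunk in this file (and in distance.py above) is the same mechanical substitution: the matching pipeline now calls its lookup helpers on beets.metadata_plugins instead of beets.plugins. The diff only shows the call sites, so the sketch below is an assumption about the shape of such a helper -- a first-hit dispatch across registered metadata sources -- and is not the actual beets.metadata_plugins code:

# Hypothetical first-hit dispatch, illustrating the call-site contract visible
# in the diff: album_for_id(id) returns a truthy info object or None.
from typing import Callable, Optional

AlbumInfo = dict  # stand-in for beets.autotag.AlbumInfo in this sketch
Source = Callable[[str], Optional[AlbumInfo]]

_sources: list[Source] = []  # hypothetical registry of metadata source plugins

def album_for_id(album_id: str) -> Optional[AlbumInfo]:
    for lookup in _sources:
        if info := lookup(album_id):
            return info
    return None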

beets/dbcore/db.py

Lines changed: 10 additions & 7 deletions

@@ -289,19 +289,22 @@ class Model(ABC, Generic[D]):
     terms.
     """

-    _types: dict[str, types.Type] = {}
-    """Optional Types for non-fixed (i.e., flexible and computed) fields.
-    """
+    @cached_classproperty
+    def _types(cls) -> dict[str, types.Type]:
+        """Optional types for non-fixed (flexible and computed) fields."""
+        return {}

     _sorts: dict[str, type[FieldSort]] = {}
     """Optional named sort criteria. The keys are strings and the values
     are subclasses of `Sort`.
     """

-    _queries: dict[str, FieldQueryType] = {}
-    """Named queries that use a field-like `name:value` syntax but which
-    do not relate to any specific field.
-    """
+    @cached_classproperty
+    def _queries(cls) -> dict[str, FieldQueryType]:
+        """Named queries that use a field-like `name:value` syntax but which
+        do not relate to any specific field.
+        """
+        return {}

     _always_dirty = False
     """By default, fields only become "dirty" when their value actually

beets/dbcore/query.py

Lines changed: 2 additions & 1 deletion

@@ -28,6 +28,7 @@
 from typing import TYPE_CHECKING, Any, Generic, TypeVar, Union

 from beets import util
+from beets.util.units import raw_seconds_short

 if TYPE_CHECKING:
     from beets.dbcore.db import AnyModel, Model

@@ -892,7 +893,7 @@ def _convert(self, s: str) -> float | None:
         if not s:
             return None
         try:
-            return util.raw_seconds_short(s)
+            return raw_seconds_short(s)
         except ValueError:
             try:
                 return float(s)
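
The _convert hunk only changes where raw_seconds_short is imported from (beets.util.units rather than beets.util); the fallback chain itself is unchanged. As a rough, self-contained illustration of that chain -- assuming, per its use here, that raw_seconds_short parses "M:SS"-style strings and raises ValueError on anything else; this is not the beets implementation:

# Hedged sketch of the duration-parsing fallback used by _convert.
def raw_seconds_short(string: str) -> float:
    minutes, sep, seconds = string.partition(":")
    if not sep:
        raise ValueError("not in M:SS form")
    return 60 * int(minutes) + float(seconds)

def convert(s: str) -> float | None:
    if not s:
        return None
    try:
        return raw_seconds_short(s)  # "4:20" -> 260.0
    except ValueError:
        try:
            return float(s)          # plain seconds, e.g. "260"
        except ValueError:
            return None

print(convert("4:20"), convert("260"), convert("abc"))  # 260.0 260.0 None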

beets/dbcore/types.py

Lines changed: 1 addition & 1 deletion

@@ -292,7 +292,7 @@ class DelimitedString(BaseString[list[str], list[str]]):
     containing delimiter-separated values.
     """

-    model_type = list
+    model_type = list[str]

     def __init__(self, delimiter: str):
         self.delimiter = delimiter
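
A side note on this one-line change: list[str] is a parameterized generic alias, and it remains callable like plain list, so existing code that instantiates model_type(...) keeps working while static checkers now see the element type. A quick standalone check (not beets code):

# list[str] behaves like list at runtime but carries the element type for
# type checkers; calling it still produces an ordinary list.
model_type = list[str]
value = model_type("a,b,c".split(","))
print(type(value), value)  # <class 'list'> ['a', 'b', 'c']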

beets/importer/stages.py

Lines changed: 2 additions & 3 deletions

@@ -70,6 +70,7 @@ def query_tasks(session: ImportSession):
     Instead of finding files from the filesystem, a query is used to
     match items from the library.
     """
+    task: ImportTask
     if session.config["singletons"]:
         # Search for items.
         for item in session.lib.items(session.query):

@@ -143,9 +144,7 @@ def lookup_candidates(session: ImportSession, task: ImportTask):

     # Restrict the initial lookup to IDs specified by the user via the -m
     # option. Currently all the IDs are passed onto the tasks directly.
-    task.search_ids = session.config["search_ids"].as_str_seq()
-
-    task.lookup_candidates()
+    task.lookup_candidates(session.config["search_ids"].as_str_seq())


 @pipeline.stage

beets/importer/tasks.py

Lines changed: 50 additions & 45 deletions

@@ -22,7 +22,7 @@
 from collections import defaultdict
 from enum import Enum
 from tempfile import mkdtemp
-from typing import TYPE_CHECKING, Callable, Iterable, Sequence
+from typing import TYPE_CHECKING, Any, Callable, Iterable, Sequence

 import mediafile

@@ -32,6 +32,8 @@
 from .state import ImportState

 if TYPE_CHECKING:
+    from beets.autotag.match import Recommendation
+
     from .session import ImportSession

 # Global logger.

@@ -159,6 +161,7 @@ class ImportTask(BaseImportTask):
     cur_album: str | None = None
     cur_artist: str | None = None
     candidates: Sequence[autotag.AlbumMatch | autotag.TrackMatch] = []
+    rec: Recommendation | None = None

     def __init__(
         self,

@@ -167,11 +170,9 @@ def __init__(
         items: Iterable[library.Item] | None,
     ):
         super().__init__(toppath, paths, items)
-        self.rec = None
         self.should_remove_duplicates = False
         self.should_merge_duplicates = False
         self.is_album = True
-        self.search_ids = []  # user-supplied candidate IDs.

     def set_choice(
         self, choice: Action | autotag.AlbumMatch | autotag.TrackMatch
@@ -356,20 +357,17 @@ def handle_created(self, session: ImportSession):
         tasks = [t for inner in tasks for t in inner]
         return tasks

-    def lookup_candidates(self):
-        """Retrieve and store candidates for this album. User-specified
-        candidate IDs are stored in self.search_ids: if present, the
-        initial lookup is restricted to only those IDs.
+    def lookup_candidates(self, search_ids: list[str]) -> None:
+        """Retrieve and store candidates for this album.
+
+        If the user-specified ``search_ids`` list is not empty, the lookup is
+        restricted to only those IDs.
         """
-        artist, album, prop = autotag.tag_album(
-            self.items, search_ids=self.search_ids
+        self.cur_artist, self.cur_album, (self.candidates, self.rec) = (
+            autotag.tag_album(self.items, search_ids=search_ids)
         )
-        self.cur_artist = artist
-        self.cur_album = album
-        self.candidates = prop.candidates
-        self.rec = prop.recommendation

-    def find_duplicates(self, lib: library.Library):
+    def find_duplicates(self, lib: library.Library) -> list[library.Album]:
         """Return a list of albums from `lib` with the same artist and
         album name as the task.

@@ -695,12 +693,12 @@ def _emit_imported(self, lib):
         for item in self.imported_items():
             plugins.send("item_imported", lib=lib, item=item)

-    def lookup_candidates(self):
-        prop = autotag.tag_item(self.item, search_ids=self.search_ids)
-        self.candidates = prop.candidates
-        self.rec = prop.recommendation
+    def lookup_candidates(self, search_ids: list[str]) -> None:
+        self.candidates, self.rec = autotag.tag_item(
+            self.item, search_ids=search_ids
+        )

-    def find_duplicates(self, lib):
+    def find_duplicates(self, lib: library.Library) -> list[library.Item]:  # type: ignore[override] # Need splitting Singleton and Album tasks into separate classes
         """Return a list of items from `lib` that have the same artist
         and title as the task.
         """
@@ -802,6 +800,11 @@ def _emit_imported(self, lib):
         pass


+ArchiveHandler = tuple[
+    Callable[[util.StrPath], bool], Callable[[util.StrPath], Any]
+]
+
+
 class ArchiveImportTask(SentinelImportTask):
     """An import task that represents the processing of an archive.

@@ -827,42 +830,41 @@ def is_archive(cls, path):
         if not os.path.isfile(path):
             return False

-        for path_test, _ in cls.handlers():
+        for path_test, _ in cls.handlers:
             if path_test(os.fsdecode(path)):
                 return True
         return False

-    @classmethod
-    def handlers(cls):
+    @util.cached_classproperty
+    def handlers(cls) -> list[ArchiveHandler]:
         """Returns a list of archive handlers.

         Each handler is a `(path_test, ArchiveClass)` tuple. `path_test`
         is a function that returns `True` if the given path can be
         handled by `ArchiveClass`. `ArchiveClass` is a class that
         implements the same interface as `tarfile.TarFile`.
         """
-        if not hasattr(cls, "_handlers"):
-            cls._handlers: list[tuple[Callable, ...]] = []
-            from zipfile import ZipFile, is_zipfile
+        _handlers: list[ArchiveHandler] = []
+        from zipfile import ZipFile, is_zipfile

-            cls._handlers.append((is_zipfile, ZipFile))
-            import tarfile
+        _handlers.append((is_zipfile, ZipFile))
+        import tarfile

-            cls._handlers.append((tarfile.is_tarfile, tarfile.open))
-            try:
-                from rarfile import RarFile, is_rarfile
-            except ImportError:
-                pass
-            else:
-                cls._handlers.append((is_rarfile, RarFile))
-            try:
-                from py7zr import SevenZipFile, is_7zfile
-            except ImportError:
-                pass
-            else:
-                cls._handlers.append((is_7zfile, SevenZipFile))
+        _handlers.append((tarfile.is_tarfile, tarfile.open))
+        try:
+            from rarfile import RarFile, is_rarfile
+        except ImportError:
+            pass
+        else:
+            _handlers.append((is_rarfile, RarFile))
+        try:
+            from py7zr import SevenZipFile, is_7zfile
+        except ImportError:
+            pass
+        else:
+            _handlers.append((is_7zfile, SevenZipFile))

-        return cls._handlers
+        return _handlers

     def cleanup(self, copy=False, delete=False, move=False):
         """Removes the temporary directory the archive was extracted to."""
@@ -879,7 +881,7 @@ def extract(self):
         """
         assert self.toppath is not None, "toppath must be set"

-        for path_test, handler_class in self.handlers():
+        for path_test, handler_class in self.handlers:
             if path_test(os.fsdecode(self.toppath)):
                 break
         else:

@@ -925,7 +927,7 @@ def __init__(self, toppath: util.PathBytes, session: ImportSession):
         self.imported = 0  # "Real" tasks created.
         self.is_archive = ArchiveImportTask.is_archive(util.syspath(toppath))

-    def tasks(self):
+    def tasks(self) -> Iterable[ImportTask]:
         """Yield all import tasks for music found in the user-specified
         path `self.toppath`. Any necessary sentinel tasks are also
         produced.

@@ -1114,7 +1116,10 @@ def albums_in_dir(path: util.PathBytes):
     a list of Items that is probably an album. Specifically, any folder
     containing any media files is an album.
     """
-    collapse_pat = collapse_paths = collapse_items = None
+    collapse_paths: list[util.PathBytes] = []
+    collapse_items: list[util.PathBytes] = []
+    collapse_pat = None
+
     ignore: list[str] = config["ignore"].as_str_seq()
     ignore_hidden: bool = config["ignore_hidden"].get(bool)

@@ -1139,7 +1144,7 @@ def albums_in_dir(path: util.PathBytes):
             # proceed to process the current one.
             if collapse_items:
                 yield collapse_paths, collapse_items
-                collapse_pat = collapse_paths = collapse_items = None
+                collapse_pat, collapse_paths, collapse_items = None, [], []

             # Check whether this directory looks like the *first* directory
             # in a multi-disc sequence. There are two indicators: the file
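
The albums_in_dir hunks replace the None sentinels with typed lists, and the reset moves from chained assignment to tuple assignment. The distinction matters once the values are mutable: chained assignment binds every name to one object, while tuple assignment builds a fresh object per name. A quick standalone demonstration:

# Chained assignment: both names point at the same list object.
a = b = []
a.append("disc 1")
print(b)              # ['disc 1'] -- shared state

# Tuple assignment, as in the new reset: each name gets its own fresh object.
pat, paths, items = None, [], []
paths.append("disc 1")
print(items)          # [] -- independent lists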
