diff --git a/beets/__init__.py b/beets/__init__.py
index 910ed78ea5..21b8bb4ffb 100644
--- a/beets/__init__.py
+++ b/beets/__init__.py
@@ -13,28 +13,29 @@
# included in all copies or substantial portions of the Software.
-import confuse
from sys import stderr
-__version__ = '1.6.1'
-__author__ = 'Adrian Sampson '
+import confuse
+
+__version__ = "1.6.1"
+__author__ = "Adrian Sampson "
class IncludeLazyConfig(confuse.LazyConfig):
"""A version of Confuse's LazyConfig that also merges in data from
YAML files specified in an `include` setting.
"""
+
def read(self, user=True, defaults=True):
super().read(user, defaults)
try:
- for view in self['include']:
+ for view in self["include"]:
self.set_file(view.as_filename())
except confuse.NotFoundError:
pass
except confuse.ConfigReadError as err:
- stderr.write("configuration `import` failed: {}"
- .format(err.reason))
+ stderr.write("configuration `import` failed: {}".format(err.reason))
-config = IncludeLazyConfig('beets', __name__)
+config = IncludeLazyConfig("beets", __name__)
diff --git a/beets/__main__.py b/beets/__main__.py
index ac829de9f6..81995f7af7 100644
--- a/beets/__main__.py
+++ b/beets/__main__.py
@@ -18,6 +18,7 @@
import sys
+
from .ui import main
if __name__ == "__main__":
diff --git a/beets/art.py b/beets/art.py
index 6e0a5f82bf..466d40005d 100644
--- a/beets/art.py
+++ b/beets/art.py
@@ -17,19 +17,19 @@
"""
-from tempfile import NamedTemporaryFile
import os
+from tempfile import NamedTemporaryFile
-from beets.util import displayable_path, syspath, bytestring_path
-from beets.util.artresizer import ArtResizer
import mediafile
+from beets.util import bytestring_path, displayable_path, syspath
+from beets.util.artresizer import ArtResizer
+
def mediafile_image(image_path, maxwidth=None):
- """Return a `mediafile.Image` object for the path.
- """
+ """Return a `mediafile.Image` object for the path."""
- with open(syspath(image_path), 'rb') as f:
+ with open(syspath(image_path), "rb") as f:
data = f.read()
return mediafile.Image(data, type=mediafile.ImageType.front)
@@ -39,31 +39,43 @@ def get_art(log, item):
try:
mf = mediafile.MediaFile(syspath(item.path))
except mediafile.UnreadableFileError as exc:
- log.warning('Could not extract art from {0}: {1}',
- displayable_path(item.path), exc)
+ log.warning(
+ "Could not extract art from {0}: {1}",
+ displayable_path(item.path),
+ exc,
+ )
return
return mf.art
-def embed_item(log, item, imagepath, maxwidth=None, itempath=None,
- compare_threshold=0, ifempty=False, as_album=False, id3v23=None,
- quality=0):
- """Embed an image into the item's media file.
- """
+def embed_item(
+ log,
+ item,
+ imagepath,
+ maxwidth=None,
+ itempath=None,
+ compare_threshold=0,
+ ifempty=False,
+ as_album=False,
+ id3v23=None,
+ quality=0,
+):
+ """Embed an image into the item's media file."""
# Conditions.
if compare_threshold:
is_similar = check_art_similarity(
- log, item, imagepath, compare_threshold)
+ log, item, imagepath, compare_threshold
+ )
if is_similar is None:
- log.warning('Error while checking art similarity; skipping.')
+ log.warning("Error while checking art similarity; skipping.")
return
elif not is_similar:
- log.info('Image not similar; skipping.')
+ log.info("Image not similar; skipping.")
return
if ifempty and get_art(log, item):
- log.info('media file already contained art')
+ log.info("media file already contained art")
return
# Filters.
@@ -72,52 +84,74 @@ def embed_item(log, item, imagepath, maxwidth=None, itempath=None,
# Get the `Image` object from the file.
try:
- log.debug('embedding {0}', displayable_path(imagepath))
+ log.debug("embedding {0}", displayable_path(imagepath))
image = mediafile_image(imagepath, maxwidth)
except OSError as exc:
- log.warning('could not read image file: {0}', exc)
+ log.warning("could not read image file: {0}", exc)
return
# Make sure the image kind is safe (some formats only support PNG
# and JPEG).
- if image.mime_type not in ('image/jpeg', 'image/png'):
- log.info('not embedding image of unsupported type: {}',
- image.mime_type)
+ if image.mime_type not in ("image/jpeg", "image/png"):
+ log.info("not embedding image of unsupported type: {}", image.mime_type)
return
- item.try_write(path=itempath, tags={'images': [image]}, id3v23=id3v23)
+ item.try_write(path=itempath, tags={"images": [image]}, id3v23=id3v23)
-def embed_album(log, album, maxwidth=None, quiet=False, compare_threshold=0,
- ifempty=False, quality=0):
- """Embed album art into all of the album's items.
- """
+def embed_album(
+ log,
+ album,
+ maxwidth=None,
+ quiet=False,
+ compare_threshold=0,
+ ifempty=False,
+ quality=0,
+):
+ """Embed album art into all of the album's items."""
imagepath = album.artpath
if not imagepath:
- log.info('No album art present for {0}', album)
+ log.info("No album art present for {0}", album)
return
if not os.path.isfile(syspath(imagepath)):
- log.info('Album art not found at {0} for {1}',
- displayable_path(imagepath), album)
+ log.info(
+ "Album art not found at {0} for {1}",
+ displayable_path(imagepath),
+ album,
+ )
return
if maxwidth:
imagepath = resize_image(log, imagepath, maxwidth, quality)
- log.info('Embedding album art into {0}', album)
+ log.info("Embedding album art into {0}", album)
for item in album.items():
- embed_item(log, item, imagepath, maxwidth, None, compare_threshold,
- ifempty, as_album=True, quality=quality)
+ embed_item(
+ log,
+ item,
+ imagepath,
+ maxwidth,
+ None,
+ compare_threshold,
+ ifempty,
+ as_album=True,
+ quality=quality,
+ )
def resize_image(log, imagepath, maxwidth, quality):
"""Returns path to an image resized to maxwidth and encoded with the
specified quality level.
"""
- log.debug('Resizing album art to {0} pixels wide and encoding at quality \
- level {1}', maxwidth, quality)
- imagepath = ArtResizer.shared.resize(maxwidth, syspath(imagepath),
- quality=quality)
+ log.debug(
+ "Resizing album art to {0} pixels wide and encoding at quality \
+ level {1}",
+ maxwidth,
+ quality,
+ )
+ imagepath = ArtResizer.shared.resize(
+ maxwidth, syspath(imagepath), quality=quality
+ )
return imagepath
@@ -151,20 +185,22 @@ def extract(log, outpath, item):
art = get_art(log, item)
outpath = bytestring_path(outpath)
if not art:
- log.info('No album art present in {0}, skipping.', item)
+ log.info("No album art present in {0}, skipping.", item)
return
# Add an extension to the filename.
ext = mediafile.image_extension(art)
if not ext:
- log.warning('Unknown image type in {0}.',
- displayable_path(item.path))
+ log.warning("Unknown image type in {0}.", displayable_path(item.path))
return
- outpath += bytestring_path('.' + ext)
-
- log.info('Extracting album art from: {0} to: {1}',
- item, displayable_path(outpath))
- with open(syspath(outpath), 'wb') as f:
+ outpath += bytestring_path("." + ext)
+
+ log.info(
+ "Extracting album art from: {0} to: {1}",
+ item,
+ displayable_path(outpath),
+ )
+ with open(syspath(outpath), "wb") as f:
f.write(art)
return outpath
@@ -178,7 +214,7 @@ def extract_first(log, outpath, items):
def clear(log, lib, query):
items = lib.items(query)
- log.info('Clearing album art from {0} items', len(items))
+ log.info("Clearing album art from {0} items", len(items))
for item in items:
- log.debug('Clearing art for {0}', item)
- item.try_write(tags={'images': None})
+ log.debug("Clearing art for {0}", item)
+ item.try_write(tags={"images": None})
diff --git a/beets/autotag/__init__.py b/beets/autotag/__init__.py
index 59b62385f3..54a9d55460 100644
--- a/beets/autotag/__init__.py
+++ b/beets/autotag/__init__.py
@@ -16,74 +16,72 @@
"""
from typing import Mapping
+from beets import config, logging
from beets.library import Item
-from beets import logging
-from beets import config
-
# Parts of external interface.
from .hooks import ( # noqa
AlbumInfo,
- TrackInfo,
AlbumMatch,
- TrackMatch,
Distance,
+ TrackInfo,
+ TrackMatch,
)
-from .match import tag_item, tag_album, current_metadata, Proposal # noqa
from .match import Recommendation # noqa
+from .match import Proposal, current_metadata, tag_album, tag_item # noqa
# Global logger.
-log = logging.getLogger('beets')
+log = logging.getLogger("beets")
# Metadata fields that are already hardcoded, or where the tag name changes.
SPECIAL_FIELDS = {
- 'album': (
- 'va',
- 'releasegroup_id',
- 'artist_id',
- 'artists_ids',
- 'album_id',
- 'mediums',
- 'tracks',
- 'year',
- 'month',
- 'day',
- 'artist',
- 'artists',
- 'artist_credit',
- 'artists_credit',
- 'artist_sort',
- 'artists_sort',
- 'data_url'
+ "album": (
+ "va",
+ "releasegroup_id",
+ "artist_id",
+ "artists_ids",
+ "album_id",
+ "mediums",
+ "tracks",
+ "year",
+ "month",
+ "day",
+ "artist",
+ "artists",
+ "artist_credit",
+ "artists_credit",
+ "artist_sort",
+ "artists_sort",
+ "data_url",
+ ),
+ "track": (
+ "track_alt",
+ "artist_id",
+ "artists_ids",
+ "release_track_id",
+ "medium",
+ "index",
+ "medium_index",
+ "title",
+ "artist_credit",
+ "artists_credit",
+ "artist_sort",
+ "artists_sort",
+ "artist",
+ "artists",
+ "track_id",
+ "medium_total",
+ "data_url",
+ "length",
),
- 'track': (
- 'track_alt',
- 'artist_id',
- 'artists_ids',
- 'release_track_id',
- 'medium',
- 'index',
- 'medium_index',
- 'title',
- 'artist_credit',
- 'artists_credit',
- 'artist_sort',
- 'artists_sort',
- 'artist',
- 'artists',
- 'track_id',
- 'medium_total',
- 'data_url',
- 'length'
- )
}
# Additional utilities for the main interface.
+
def apply_item_metadata(item: Item, track_info: TrackInfo):
- """Set an item's metadata from its matched TrackInfo object.
- """
+ """Set an item's metadata from its matched TrackInfo object."""
item.artist = track_info.artist
item.artists = track_info.artists
item.artist_sort = track_info.artist_sort
@@ -100,7 +98,7 @@ def apply_item_metadata(item: Item, track_info: TrackInfo):
for field, value in track_info.items():
# We only overwrite fields that are not already hardcoded.
- if field in SPECIAL_FIELDS['track']:
+ if field in SPECIAL_FIELDS["track"]:
continue
if value is None:
continue
@@ -116,22 +114,24 @@ def apply_metadata(album_info: AlbumInfo, mapping: Mapping[Item, TrackInfo]):
"""
for item, track_info in mapping.items():
# Artist or artist credit.
- if config['artist_credit']:
- item.artist = (track_info.artist_credit or
- track_info.artist or
- album_info.artist_credit or
- album_info.artist)
- item.artists = (track_info.artists_credit or
- track_info.artists or
- album_info.artists_credit or
- album_info.artists)
- item.albumartist = (album_info.artist_credit or
- album_info.artist)
- item.albumartists = (album_info.artists_credit or
- album_info.artists)
+ if config["artist_credit"]:
+ item.artist = (
+ track_info.artist_credit
+ or track_info.artist
+ or album_info.artist_credit
+ or album_info.artist
+ )
+ item.artists = (
+ track_info.artists_credit
+ or track_info.artists
+ or album_info.artists_credit
+ or album_info.artists
+ )
+ item.albumartist = album_info.artist_credit or album_info.artist
+ item.albumartists = album_info.artists_credit or album_info.artists
else:
- item.artist = (track_info.artist or album_info.artist)
- item.artists = (track_info.artists or album_info.artists)
+ item.artist = track_info.artist or album_info.artist
+ item.artists = track_info.artists or album_info.artists
item.albumartist = album_info.artist
item.albumartists = album_info.artists
@@ -141,27 +141,29 @@ def apply_metadata(album_info: AlbumInfo, mapping: Mapping[Item, TrackInfo]):
# Artist sort and credit names.
item.artist_sort = track_info.artist_sort or album_info.artist_sort
item.artists_sort = track_info.artists_sort or album_info.artists_sort
- item.artist_credit = (track_info.artist_credit or
- album_info.artist_credit)
- item.artists_credit = (track_info.artists_credit or
- album_info.artists_credit)
+ item.artist_credit = (
+ track_info.artist_credit or album_info.artist_credit
+ )
+ item.artists_credit = (
+ track_info.artists_credit or album_info.artists_credit
+ )
item.albumartist_sort = album_info.artist_sort
item.albumartists_sort = album_info.artists_sort
item.albumartist_credit = album_info.artist_credit
item.albumartists_credit = album_info.artists_credit
# Release date.
- for prefix in '', 'original_':
- if config['original_date'] and not prefix:
+ for prefix in "", "original_":
+ if config["original_date"] and not prefix:
# Ignore specific release date.
continue
- for suffix in 'year', 'month', 'day':
+ for suffix in "year", "month", "day":
key = prefix + suffix
value = getattr(album_info, key) or 0
# If we don't even have a year, apply nothing.
- if suffix == 'year' and not value:
+ if suffix == "year" and not value:
break
# Otherwise, set the fetched value (or 0 for the month
@@ -170,13 +172,13 @@ def apply_metadata(album_info: AlbumInfo, mapping: Mapping[Item, TrackInfo]):
# If we're using original release date for both fields,
# also set item.year = info.original_year, etc.
- if config['original_date']:
+ if config["original_date"]:
item[suffix] = value
# Title.
item.title = track_info.title
- if config['per_disc_numbering']:
+ if config["per_disc_numbering"]:
# We want to let the track number be zero, but if the medium index
# is not provided we need to fall back to the overall index.
if track_info.medium_index is not None:
@@ -219,17 +221,17 @@ def apply_metadata(album_info: AlbumInfo, mapping: Mapping[Item, TrackInfo]):
# Don't overwrite fields with empty values unless the
# field is explicitly allowed to be overwritten
for field, value in album_info.items():
- if field in SPECIAL_FIELDS['album']:
+ if field in SPECIAL_FIELDS["album"]:
continue
- clobber = field in config['overwrite_null']['album'].as_str_seq()
+ clobber = field in config["overwrite_null"]["album"].as_str_seq()
if value is None and not clobber:
continue
item[field] = value
for field, value in track_info.items():
- if field in SPECIAL_FIELDS['track']:
+ if field in SPECIAL_FIELDS["track"]:
continue
- clobber = field in config['overwrite_null']['track'].as_str_seq()
+ clobber = field in config["overwrite_null"]["track"].as_str_seq()
value = getattr(track_info, field)
if value is None and not clobber:
continue
diff --git a/beets/autotag/hooks.py b/beets/autotag/hooks.py
index 8d17e5729a..13c43e8cf2 100644
--- a/beets/autotag/hooks.py
+++ b/beets/autotag/hooks.py
@@ -15,22 +15,32 @@
"""Glue between metadata sources and the matching logic."""
from __future__ import annotations
+
+import re
from collections import namedtuple
from functools import total_ordering
-import re
-from typing import Dict, List, Tuple, Iterator, Union, Any, Optional, \
- Iterable, Callable, cast
+from typing import (
+ Any,
+ Callable,
+ Dict,
+ Iterable,
+ Iterator,
+ List,
+ Optional,
+ Tuple,
+ Union,
+ cast,
+)
-from beets import logging
-from beets import plugins
-from beets import config
-from beets.library import Item
-from beets.util import as_string
-from beets.autotag import mb
from jellyfish import levenshtein_distance
from unidecode import unidecode
-log = logging.getLogger('beets')
+from beets import config, logging, plugins
+from beets.autotag import mb
+from beets.library import Item
+from beets.util import as_string
+
+log = logging.getLogger("beets")
# Classes used to represent candidate options.
@@ -68,48 +78,48 @@ class AlbumInfo(AttrDict):
# TYPING: are all of these correct? I've assumed optional strings
def __init__(
- self,
- tracks: List['TrackInfo'],
- album: Optional[str] = None,
- album_id: Optional[str] = None,
- artist: Optional[str] = None,
- artist_id: Optional[str] = None,
- artists: Optional[List[str]] = None,
- artists_ids: Optional[List[str]] = None,
- asin: Optional[str] = None,
- albumtype: Optional[str] = None,
- albumtypes: Optional[List[str]] = None,
- va: bool = False,
- year: Optional[int] = None,
- month: Optional[int] = None,
- day: Optional[int] = None,
- label: Optional[str] = None,
- mediums: Optional[int] = None,
- artist_sort: Optional[str] = None,
- artists_sort: Optional[List[str]] = None,
- releasegroup_id: Optional[str] = None,
- release_group_title: Optional[str] = None,
- catalognum: Optional[str] = None,
- script: Optional[str] = None,
- language: Optional[str] = None,
- country: Optional[str] = None,
- style: Optional[str] = None,
- genre: Optional[str] = None,
- albumstatus: Optional[str] = None,
- media: Optional[str] = None,
- albumdisambig: Optional[str] = None,
- releasegroupdisambig: Optional[str] = None,
- artist_credit: Optional[str] = None,
- artists_credit: Optional[List[str]] = None,
- original_year: Optional[int] = None,
- original_month: Optional[int] = None,
- original_day: Optional[int] = None,
- data_source: Optional[str] = None,
- data_url: Optional[str] = None,
- discogs_albumid: Optional[str] = None,
- discogs_labelid: Optional[str] = None,
- discogs_artistid: Optional[str] = None,
- **kwargs,
+ self,
+ tracks: List["TrackInfo"],
+ album: Optional[str] = None,
+ album_id: Optional[str] = None,
+ artist: Optional[str] = None,
+ artist_id: Optional[str] = None,
+ artists: Optional[List[str]] = None,
+ artists_ids: Optional[List[str]] = None,
+ asin: Optional[str] = None,
+ albumtype: Optional[str] = None,
+ albumtypes: Optional[List[str]] = None,
+ va: bool = False,
+ year: Optional[int] = None,
+ month: Optional[int] = None,
+ day: Optional[int] = None,
+ label: Optional[str] = None,
+ mediums: Optional[int] = None,
+ artist_sort: Optional[str] = None,
+ artists_sort: Optional[List[str]] = None,
+ releasegroup_id: Optional[str] = None,
+ release_group_title: Optional[str] = None,
+ catalognum: Optional[str] = None,
+ script: Optional[str] = None,
+ language: Optional[str] = None,
+ country: Optional[str] = None,
+ style: Optional[str] = None,
+ genre: Optional[str] = None,
+ albumstatus: Optional[str] = None,
+ media: Optional[str] = None,
+ albumdisambig: Optional[str] = None,
+ releasegroupdisambig: Optional[str] = None,
+ artist_credit: Optional[str] = None,
+ artists_credit: Optional[List[str]] = None,
+ original_year: Optional[int] = None,
+ original_month: Optional[int] = None,
+ original_day: Optional[int] = None,
+ data_source: Optional[str] = None,
+ data_url: Optional[str] = None,
+ discogs_albumid: Optional[str] = None,
+ discogs_labelid: Optional[str] = None,
+ discogs_artistid: Optional[str] = None,
+ **kwargs,
):
self.album = album
self.album_id = album_id
@@ -156,24 +166,39 @@ def __init__(
# Work around a bug in python-musicbrainz-ngs that causes some
# strings to be bytes rather than Unicode.
# https://github.com/alastair/python-musicbrainz-ngs/issues/85
- def decode(self, codec: str = 'utf-8'):
+ def decode(self, codec: str = "utf-8"):
"""Ensure that all string attributes on this object, and the
constituent `TrackInfo` objects, are decoded to Unicode.
"""
- for fld in ['album', 'artist', 'albumtype', 'label', 'artist_sort',
- 'catalognum', 'script', 'language', 'country', 'style',
- 'genre', 'albumstatus', 'albumdisambig',
- 'releasegroupdisambig', 'artist_credit',
- 'media', 'discogs_albumid', 'discogs_labelid',
- 'discogs_artistid']:
+ for fld in [
+ "album",
+ "artist",
+ "albumtype",
+ "label",
+ "artist_sort",
+ "catalognum",
+ "script",
+ "language",
+ "country",
+ "style",
+ "genre",
+ "albumstatus",
+ "albumdisambig",
+ "releasegroupdisambig",
+ "artist_credit",
+ "media",
+ "discogs_albumid",
+ "discogs_labelid",
+ "discogs_artistid",
+ ]:
value = getattr(self, fld)
if isinstance(value, bytes):
- setattr(self, fld, value.decode(codec, 'ignore'))
+ setattr(self, fld, value.decode(codec, "ignore"))
for track in self.tracks:
track.decode(codec)
- def copy(self) -> 'AlbumInfo':
+ def copy(self) -> "AlbumInfo":
dupe = AlbumInfo([])
dupe.update(self)
dupe.tracks = [track.copy() for track in self.tracks]
@@ -194,40 +219,40 @@ class TrackInfo(AttrDict):
# TYPING: are all of these correct? I've assumed optional strings
def __init__(
- self,
- title: Optional[str] = None,
- track_id: Optional[str] = None,
- release_track_id: Optional[str] = None,
- artist: Optional[str] = None,
- artist_id: Optional[str] = None,
- artists: Optional[List[str]] = None,
- artists_ids: Optional[List[str]] = None,
- length: Optional[float] = None,
- index: Optional[int] = None,
- medium: Optional[int] = None,
- medium_index: Optional[int] = None,
- medium_total: Optional[int] = None,
- artist_sort: Optional[str] = None,
- artists_sort: Optional[List[str]] = None,
- disctitle: Optional[str] = None,
- artist_credit: Optional[str] = None,
- artists_credit: Optional[List[str]] = None,
- data_source: Optional[str] = None,
- data_url: Optional[str] = None,
- media: Optional[str] = None,
- lyricist: Optional[str] = None,
- composer: Optional[str] = None,
- composer_sort: Optional[str] = None,
- arranger: Optional[str] = None,
- track_alt: Optional[str] = None,
- work: Optional[str] = None,
- mb_workid: Optional[str] = None,
- work_disambig: Optional[str] = None,
- bpm: Optional[str] = None,
- initial_key: Optional[str] = None,
- genre: Optional[str] = None,
- album: Optional[str] = None,
- **kwargs,
+ self,
+ title: Optional[str] = None,
+ track_id: Optional[str] = None,
+ release_track_id: Optional[str] = None,
+ artist: Optional[str] = None,
+ artist_id: Optional[str] = None,
+ artists: Optional[List[str]] = None,
+ artists_ids: Optional[List[str]] = None,
+ length: Optional[float] = None,
+ index: Optional[int] = None,
+ medium: Optional[int] = None,
+ medium_index: Optional[int] = None,
+ medium_total: Optional[int] = None,
+ artist_sort: Optional[str] = None,
+ artists_sort: Optional[List[str]] = None,
+ disctitle: Optional[str] = None,
+ artist_credit: Optional[str] = None,
+ artists_credit: Optional[List[str]] = None,
+ data_source: Optional[str] = None,
+ data_url: Optional[str] = None,
+ media: Optional[str] = None,
+ lyricist: Optional[str] = None,
+ composer: Optional[str] = None,
+ composer_sort: Optional[str] = None,
+ arranger: Optional[str] = None,
+ track_alt: Optional[str] = None,
+ work: Optional[str] = None,
+ mb_workid: Optional[str] = None,
+ work_disambig: Optional[str] = None,
+ bpm: Optional[str] = None,
+ initial_key: Optional[str] = None,
+ genre: Optional[str] = None,
+ album: Optional[str] = None,
+ **kwargs,
):
self.title = title
self.track_id = track_id
@@ -264,17 +289,24 @@ def __init__(
self.update(kwargs)
# As above, work around a bug in python-musicbrainz-ngs.
- def decode(self, codec='utf-8'):
+ def decode(self, codec="utf-8"):
"""Ensure that all string attributes on this object are decoded
to Unicode.
"""
- for fld in ['title', 'artist', 'medium', 'artist_sort', 'disctitle',
- 'artist_credit', 'media']:
+ for fld in [
+ "title",
+ "artist",
+ "medium",
+ "artist_sort",
+ "disctitle",
+ "artist_credit",
+ "media",
+ ]:
value = getattr(self, fld)
if isinstance(value, bytes):
- setattr(self, fld, value.decode(codec, 'ignore'))
+ setattr(self, fld, value.decode(codec, "ignore"))
- def copy(self) -> 'TrackInfo':
+ def copy(self) -> "TrackInfo":
dupe = TrackInfo()
dupe.update(self)
return dupe
@@ -284,19 +316,19 @@ def copy(self) -> 'TrackInfo':
# Parameters for string distance function.
# Words that can be moved to the end of a string using a comma.
-SD_END_WORDS = ['the', 'a', 'an']
+SD_END_WORDS = ["the", "a", "an"]
# Reduced weights for certain portions of the string.
SD_PATTERNS = [
- (r'^the ', 0.1),
- (r'[\[\(]?(ep|single)[\]\)]?', 0.0),
- (r'[\[\(]?(featuring|feat|ft)[\. :].+', 0.1),
- (r'\(.*?\)', 0.3),
- (r'\[.*?\]', 0.3),
- (r'(, )?(pt\.|part) .+', 0.2),
+ (r"^the ", 0.1),
+ (r"[\[\(]?(ep|single)[\]\)]?", 0.0),
+ (r"[\[\(]?(featuring|feat|ft)[\. :].+", 0.1),
+ (r"\(.*?\)", 0.3),
+ (r"\[.*?\]", 0.3),
+ (r"(, )?(pt\.|part) .+", 0.2),
]
# Replacements to use before testing distance.
SD_REPLACE = [
- (r'&', 'and'),
+ (r"&", "and"),
]
@@ -310,8 +342,8 @@ def _string_dist_basic(str1: str, str2: str) -> float:
assert isinstance(str2, str)
str1 = as_string(unidecode(str1))
str2 = as_string(unidecode(str2))
- str1 = re.sub(r'[^a-z0-9]', '', str1.lower())
- str2 = re.sub(r'[^a-z0-9]', '', str2.lower())
+ str1 = re.sub(r"[^a-z0-9]", "", str1.lower())
+ str2 = re.sub(r"[^a-z0-9]", "", str2.lower())
if not str1 and not str2:
return 0.0
return levenshtein_distance(str1, str2) / float(max(len(str1), len(str2)))
@@ -334,10 +366,10 @@ def string_dist(str1: Optional[str], str2: Optional[str]) -> float:
# example, "the something" should be considered equal to
# "something, the".
for word in SD_END_WORDS:
- if str1.endswith(', %s' % word):
- str1 = '{} {}'.format(word, str1[:-len(word) - 2])
- if str2.endswith(', %s' % word):
- str2 = '{} {}'.format(word, str2[:-len(word) - 2])
+ if str1.endswith(", %s" % word):
+ str1 = "{} {}".format(word, str1[: -len(word) - 2])
+ if str2.endswith(", %s" % word):
+ str2 = "{} {}".format(word, str2[: -len(word) - 2])
# Perform a couple of basic normalizing substitutions.
for pat, repl in SD_REPLACE:
@@ -352,8 +384,8 @@ def string_dist(str1: Optional[str], str2: Optional[str]) -> float:
penalty = 0.0
for pat, weight in SD_PATTERNS:
# Get strings that drop the pattern.
- case_str1 = re.sub(pat, '', str1)
- case_str2 = re.sub(pat, '', str2)
+ case_str1 = re.sub(pat, "", str1)
+ case_str2 = re.sub(pat, "", str2)
if case_str1 != str1 or case_str2 != str2:
# If the pattern was present (i.e., it is deleted in the
@@ -405,9 +437,8 @@ def __init__(self):
@LazyClassProperty
def _weights(cls) -> Dict[str, float]: # noqa: N805
- """A dictionary from keys to floating-point weights.
- """
- weights_view = config['match']['distance_weights']
+ """A dictionary from keys to floating-point weights."""
+ weights_view = config["match"]["distance_weights"]
weights = {}
for key in weights_view.keys():
weights[key] = weights_view[key].as_number()
@@ -427,8 +458,7 @@ def distance(self) -> float:
@property
def max_distance(self) -> float:
- """Return the maximum distance penalty (normalization factor).
- """
+ """Return the maximum distance penalty (normalization factor)."""
dist_max = 0.0
for key, penalty in self._penalties.items():
dist_max += len(penalty) * self._weights[key]
@@ -436,8 +466,7 @@ def max_distance(self) -> float:
@property
def raw_distance(self) -> float:
- """Return the raw (denormalized) distance.
- """
+ """Return the raw (denormalized) distance."""
dist_raw = 0.0
for key, penalty in self._penalties.items():
dist_raw += sum(penalty) * self._weights[key]
@@ -457,8 +486,7 @@ def items(self) -> List[Tuple[str, float]]:
# ascending order (for keys, when the penalty is equal) and
# still get the items with the biggest distance first.
return sorted(
- list_,
- key=lambda key_and_dist: (-key_and_dist[1], key_and_dist[0])
+ list_, key=lambda key_and_dist: (-key_and_dist[1], key_and_dist[0])
)
def __hash__(self) -> int:
@@ -487,8 +515,7 @@ def __str__(self) -> str:
# Behave like a dict.
def __getitem__(self, key) -> float:
- """Returns the weighted distance for a named penalty.
- """
+ """Returns the weighted distance for a named penalty."""
dist = sum(self._penalties[key]) * self._weights[key]
dist_max = self.max_distance
if dist_max:
@@ -504,12 +531,11 @@ def __len__(self) -> int:
def keys(self) -> List[str]:
return [key for key, _ in self.items()]
- def update(self, dist: 'Distance'):
- """Adds all the distance penalties from `dist`.
- """
+ def update(self, dist: "Distance"):
+ """Adds all the distance penalties from `dist`."""
if not isinstance(dist, Distance):
raise ValueError(
- '`dist` must be a Distance object, not {}'.format(type(dist))
+ "`dist` must be a Distance object, not {}".format(type(dist))
)
for key, penalties in dist._penalties.items():
self._penalties.setdefault(key, []).extend(penalties)
@@ -533,16 +559,14 @@ def add(self, key: str, dist: float):
for the same key.
"""
if not 0.0 <= dist <= 1.0:
- raise ValueError(
- f'`dist` must be between 0.0 and 1.0, not {dist}'
- )
+ raise ValueError(f"`dist` must be between 0.0 and 1.0, not {dist}")
self._penalties.setdefault(key, []).append(dist)
def add_equality(
- self,
- key: str,
- value: Any,
- options: Union[List[Any], Tuple[Any, ...], Any],
+ self,
+ key: str,
+ value: Any,
+ options: Union[List[Any], Tuple[Any, ...], Any],
):
"""Adds a distance penalty of 1.0 if `value` doesn't match any
of the values in `options`. If an option is a compiled regular
@@ -582,10 +606,10 @@ def add_number(self, key: str, number1: int, number2: int):
self.add(key, 0.0)
def add_priority(
- self,
- key: str,
- value: Any,
- options: Union[List[Any], Tuple[Any, ...], Any],
+ self,
+ key: str,
+ value: Any,
+ options: Union[List[Any], Tuple[Any, ...], Any],
):
"""Adds a distance penalty that corresponds to the position at
which `value` appears in `options`. A distance penalty of 0.0
@@ -605,10 +629,10 @@ def add_priority(
self.add(key, dist)
def add_ratio(
- self,
- key: str,
- number1: Union[int, float],
- number2: Union[int, float],
+ self,
+ key: str,
+ number1: Union[int, float],
+ number2: Union[int, float],
):
"""Adds a distance penalty for `number1` as a ratio of `number2`.
`number1` is bound at 0 and `number2`.
@@ -630,14 +654,16 @@ def add_string(self, key: str, str1: Optional[str], str2: Optional[str]):
# Structures that compose all the information for a candidate match.
-AlbumMatch = namedtuple('AlbumMatch', ['distance', 'info', 'mapping',
- 'extra_items', 'extra_tracks'])
+AlbumMatch = namedtuple(
+ "AlbumMatch", ["distance", "info", "mapping", "extra_items", "extra_tracks"]
+)
-TrackMatch = namedtuple('TrackMatch', ['distance', 'info'])
+TrackMatch = namedtuple("TrackMatch", ["distance", "info"])
# Aggregation of sources.
+
def album_for_mbid(release_id: str) -> Optional[AlbumInfo]:
"""Get an AlbumInfo object for a MusicBrainz release ID. Return None
if the ID is not found.
@@ -645,7 +671,7 @@ def album_for_mbid(release_id: str) -> Optional[AlbumInfo]:
try:
album = mb.album_for_id(release_id)
if album:
- plugins.send('albuminfo_received', info=album)
+ plugins.send("albuminfo_received", info=album)
return album
except mb.MusicBrainzAPIError as exc:
exc.log(log)
@@ -659,7 +685,7 @@ def track_for_mbid(recording_id: str) -> Optional[TrackInfo]:
try:
track = mb.track_for_id(recording_id)
if track:
- plugins.send('trackinfo_received', info=track)
+ plugins.send("trackinfo_received", info=track)
return track
except mb.MusicBrainzAPIError as exc:
exc.log(log)
@@ -673,7 +699,7 @@ def albums_for_id(album_id: str) -> Iterable[AlbumInfo]:
yield a
for a in plugins.album_for_id(album_id):
if a:
- plugins.send('albuminfo_received', info=a)
+ plugins.send("albuminfo_received", info=a)
yield a
@@ -684,7 +710,7 @@ def tracks_for_id(track_id: str) -> Iterable[TrackInfo]:
yield t
for t in plugins.track_for_id(track_id):
if t:
- plugins.send('trackinfo_received', info=t)
+ plugins.send("trackinfo_received", info=t)
yield t
@@ -696,13 +722,13 @@ def invoke_mb(call_func: Callable, *args):
return ()
-@plugins.notify_info_yielded('albuminfo_received')
+@plugins.notify_info_yielded("albuminfo_received")
def album_candidates(
- items: List[Item],
- artist: str,
- album: str,
- va_likely: bool,
- extra_tags: Dict,
+ items: List[Item],
+ artist: str,
+ album: str,
+ va_likely: bool,
+ extra_tags: Dict,
) -> Iterable[Tuple]:
"""Search for album matches. ``items`` is a list of Item objects
that make up the album. ``artist`` and ``album`` are the respective
@@ -716,19 +742,21 @@ def album_candidates(
if config["musicbrainz"]["enabled"]:
# Base candidates if we have album and artist to match.
if artist and album:
- yield from invoke_mb(mb.match_album, artist, album, len(items),
- extra_tags)
+ yield from invoke_mb(
+ mb.match_album, artist, album, len(items), extra_tags
+ )
# Also add VA matches from MusicBrainz where appropriate.
if va_likely and album:
- yield from invoke_mb(mb.match_album, None, album, len(items),
- extra_tags)
+ yield from invoke_mb(
+ mb.match_album, None, album, len(items), extra_tags
+ )
# Candidates from plugins.
yield from plugins.candidates(items, artist, album, va_likely, extra_tags)
-@plugins.notify_info_yielded('trackinfo_received')
+@plugins.notify_info_yielded("trackinfo_received")
def item_candidates(item: Item, artist: str, title: str) -> Iterable[Tuple]:
"""Search for item matches. ``item`` is the Item to be matched.
``artist`` and ``title`` are strings and either reflect the item or
diff --git a/beets/autotag/match.py b/beets/autotag/match.py
index 10fe2b24ca..c79eba2d7e 100644
--- a/beets/autotag/match.py
+++ b/beets/autotag/match.py
@@ -19,6 +19,7 @@
import datetime
import re
+from collections import namedtuple
from typing import (
Any,
Dict,
@@ -33,33 +34,38 @@
)
from munkres import Munkres
-from collections import namedtuple
-from beets import logging
-from beets import plugins
-from beets import config
+from beets import config, logging, plugins
+from beets.autotag import (
+ AlbumInfo,
+ AlbumMatch,
+ Distance,
+ TrackInfo,
+ TrackMatch,
+ hooks,
+)
from beets.library import Item
from beets.util import plurality
-from beets.autotag import hooks, TrackInfo, Distance, AlbumInfo, TrackMatch, \
- AlbumMatch
from beets.util.enumeration import OrderedEnum
# Artist signals that indicate "various artists". These are used at the
# album level to determine whether a given release is likely a VA
# release and also on the track level to to remove the penalty for
# differing artists.
-VA_ARTISTS = ('', 'various artists', 'various', 'va', 'unknown')
+VA_ARTISTS = ("", "various artists", "various", "va", "unknown")
# Global logger.
-log = logging.getLogger('beets')
+log = logging.getLogger("beets")
# Recommendation enumeration.
+
class Recommendation(OrderedEnum):
"""Indicates a qualitative suggestion to the user about what should
be done with a given match.
"""
+
none = 0
low = 1
medium = 2
@@ -70,11 +76,12 @@ class Recommendation(OrderedEnum):
# consists of a list of possible candidates (i.e., AlbumInfo or TrackInfo
# objects) and a recommendation value.
-Proposal = namedtuple('Proposal', ('candidates', 'recommendation'))
+Proposal = namedtuple("Proposal", ("candidates", "recommendation"))
# Primary matching functionality.
+
def current_metadata(
items: Iterable[Item],
) -> Tuple[Dict[str, Any], Dict[str, Any]]:
@@ -87,24 +94,34 @@ def current_metadata(
likelies = {}
consensus = {}
- fields = ['artist', 'album', 'albumartist', 'year', 'disctotal',
- 'mb_albumid', 'label', 'catalognum', 'country', 'media',
- 'albumdisambig']
+ fields = [
+ "artist",
+ "album",
+ "albumartist",
+ "year",
+ "disctotal",
+ "mb_albumid",
+ "label",
+ "catalognum",
+ "country",
+ "media",
+ "albumdisambig",
+ ]
for field in fields:
values = [item[field] for item in items if item]
likelies[field], freq = plurality(values)
- consensus[field] = (freq == len(values))
+ consensus[field] = freq == len(values)
# If there's an album artist consensus, use this for the artist.
- if consensus['albumartist'] and likelies['albumartist']:
- likelies['artist'] = likelies['albumartist']
+ if consensus["albumartist"] and likelies["albumartist"]:
+ likelies["artist"] = likelies["albumartist"]
return likelies, consensus
def assign_items(
- items: Sequence[Item],
- tracks: Sequence[TrackInfo],
+ items: Sequence[Item],
+ tracks: Sequence[TrackInfo],
) -> Tuple[Dict[Item, TrackInfo], List[Item], List[TrackInfo]]:
"""Given a list of Items and a list of TrackInfo objects, find the
best mapping between them. Returns a mapping from Items to TrackInfo
@@ -121,9 +138,9 @@ def assign_items(
costs.append(row)
# Find a minimum-cost bipartite matching.
- log.debug('Computing track assignment...')
+ log.debug("Computing track assignment...")
matching = Munkres().compute(costs)
- log.debug('...done.')
+ log.debug("...done.")
# Produce the output matching.
mapping = {items[i]: tracks[j] for (i, j) in matching}
@@ -142,9 +159,9 @@ def track_index_changed(item: Item, track_info: TrackInfo) -> bool:
def track_distance(
- item: Item,
- track_info: TrackInfo,
- incl_artist: bool = False,
+ item: Item,
+ track_info: TrackInfo,
+ incl_artist: bool = False,
) -> Distance:
"""Determines the significance of a track metadata change. Returns a
Distance object. `incl_artist` indicates that a distance component should
@@ -157,31 +174,34 @@ def track_distance(
item_length = cast(float, item.length)
track_length_grace = cast(
Union[float, int],
- config['match']['track_length_grace'].as_number(),
+ config["match"]["track_length_grace"].as_number(),
)
track_length_max = cast(
Union[float, int],
- config['match']['track_length_max'].as_number(),
+ config["match"]["track_length_max"].as_number(),
)
diff = abs(item_length - track_info.length) - track_length_grace
- dist.add_ratio('track_length', diff, track_length_max)
+ dist.add_ratio("track_length", diff, track_length_max)
# Title.
- dist.add_string('track_title', item.title, track_info.title)
+ dist.add_string("track_title", item.title, track_info.title)
# Artist. Only check if there is actually an artist in the track data.
- if incl_artist and track_info.artist and \
- item.artist.lower() not in VA_ARTISTS:
- dist.add_string('track_artist', item.artist, track_info.artist)
+ if (
+ incl_artist
+ and track_info.artist
+ and item.artist.lower() not in VA_ARTISTS
+ ):
+ dist.add_string("track_artist", item.artist, track_info.artist)
# Track index.
if track_info.index and item.track:
- dist.add_expr('track_index', track_index_changed(item, track_info))
+ dist.add_expr("track_index", track_index_changed(item, track_info))
# Track ID.
if item.mb_trackid:
- dist.add_expr('track_id', item.mb_trackid != track_info.track_id)
+ dist.add_expr("track_id", item.mb_trackid != track_info.track_id)
# Plugins.
dist.update(plugins.track_distance(item, track_info))
@@ -190,9 +210,9 @@ def track_distance(
def distance(
- items: Sequence[Item],
- album_info: AlbumInfo,
- mapping: Dict[Item, TrackInfo],
+ items: Sequence[Item],
+ album_info: AlbumInfo,
+ mapping: Dict[Item, TrackInfo],
) -> Distance:
"""Determines how "significant" an album metadata change would be.
Returns a Distance object. `album_info` is an AlbumInfo object
@@ -208,92 +228,96 @@ def distance(
# Artist, if not various.
if not album_info.va:
- dist.add_string('artist', likelies['artist'], album_info.artist)
+ dist.add_string("artist", likelies["artist"], album_info.artist)
# Album.
- dist.add_string('album', likelies['album'], album_info.album)
+ dist.add_string("album", likelies["album"], album_info.album)
# Current or preferred media.
if album_info.media:
# Preferred media options.
- patterns = config['match']['preferred']['media'].as_str_seq()
+ patterns = config["match"]["preferred"]["media"].as_str_seq()
patterns = cast(Sequence, patterns)
- options = [re.compile(r'(\d+x)?(%s)' % pat, re.I) for pat in patterns]
+ options = [re.compile(r"(\d+x)?(%s)" % pat, re.I) for pat in patterns]
if options:
- dist.add_priority('media', album_info.media, options)
+ dist.add_priority("media", album_info.media, options)
# Current media.
- elif likelies['media']:
- dist.add_equality('media', album_info.media, likelies['media'])
+ elif likelies["media"]:
+ dist.add_equality("media", album_info.media, likelies["media"])
# Mediums.
- if likelies['disctotal'] and album_info.mediums:
- dist.add_number('mediums', likelies['disctotal'], album_info.mediums)
+ if likelies["disctotal"] and album_info.mediums:
+ dist.add_number("mediums", likelies["disctotal"], album_info.mediums)
# Prefer earliest release.
- if album_info.year and config['match']['preferred']['original_year']:
+ if album_info.year and config["match"]["preferred"]["original_year"]:
# Assume 1889 (earliest first gramophone discs) if we don't know the
# original year.
original = album_info.original_year or 1889
diff = abs(album_info.year - original)
diff_max = abs(datetime.date.today().year - original)
- dist.add_ratio('year', diff, diff_max)
+ dist.add_ratio("year", diff, diff_max)
# Year.
- elif likelies['year'] and album_info.year:
- if likelies['year'] in (album_info.year, album_info.original_year):
+ elif likelies["year"] and album_info.year:
+ if likelies["year"] in (album_info.year, album_info.original_year):
# No penalty for matching release or original year.
- dist.add('year', 0.0)
+ dist.add("year", 0.0)
elif album_info.original_year:
# Prefer matchest closest to the release year.
- diff = abs(likelies['year'] - album_info.year)
- diff_max = abs(datetime.date.today().year -
- album_info.original_year)
- dist.add_ratio('year', diff, diff_max)
+ diff = abs(likelies["year"] - album_info.year)
+ diff_max = abs(
+ datetime.date.today().year - album_info.original_year
+ )
+ dist.add_ratio("year", diff, diff_max)
else:
# Full penalty when there is no original year.
- dist.add('year', 1.0)
+ dist.add("year", 1.0)
# Preferred countries.
- patterns = config['match']['preferred']['countries'].as_str_seq()
+ patterns = config["match"]["preferred"]["countries"].as_str_seq()
patterns = cast(Sequence, patterns)
options = [re.compile(pat, re.I) for pat in patterns]
if album_info.country and options:
- dist.add_priority('country', album_info.country, options)
+ dist.add_priority("country", album_info.country, options)
# Country.
- elif likelies['country'] and album_info.country:
- dist.add_string('country', likelies['country'], album_info.country)
+ elif likelies["country"] and album_info.country:
+ dist.add_string("country", likelies["country"], album_info.country)
# Label.
- if likelies['label'] and album_info.label:
- dist.add_string('label', likelies['label'], album_info.label)
+ if likelies["label"] and album_info.label:
+ dist.add_string("label", likelies["label"], album_info.label)
# Catalog number.
- if likelies['catalognum'] and album_info.catalognum:
- dist.add_string('catalognum', likelies['catalognum'],
- album_info.catalognum)
+ if likelies["catalognum"] and album_info.catalognum:
+ dist.add_string(
+ "catalognum", likelies["catalognum"], album_info.catalognum
+ )
# Disambiguation.
- if likelies['albumdisambig'] and album_info.albumdisambig:
- dist.add_string('albumdisambig', likelies['albumdisambig'],
- album_info.albumdisambig)
+ if likelies["albumdisambig"] and album_info.albumdisambig:
+ dist.add_string(
+ "albumdisambig", likelies["albumdisambig"], album_info.albumdisambig
+ )
# Album ID.
- if likelies['mb_albumid']:
- dist.add_equality('album_id', likelies['mb_albumid'],
- album_info.album_id)
+ if likelies["mb_albumid"]:
+ dist.add_equality(
+ "album_id", likelies["mb_albumid"], album_info.album_id
+ )
# Tracks.
dist.tracks = {}
for item, track in mapping.items():
dist.tracks[track] = track_distance(item, track, album_info.va)
- dist.add('tracks', dist.tracks[track].distance)
+ dist.add("tracks", dist.tracks[track].distance)
# Missing tracks.
for _ in range(len(album_info.tracks) - len(mapping)):
- dist.add('missing_tracks', 1.0)
+ dist.add("missing_tracks", 1.0)
# Unmatched tracks.
for _ in range(len(items) - len(mapping)):
- dist.add('unmatched_tracks', 1.0)
+ dist.add("unmatched_tracks", 1.0)
# Plugins.
dist.update(plugins.album_distance(items, album_info, mapping))
@@ -312,21 +336,21 @@ def match_by_id(items: Iterable[Item]):
try:
first = next(albumids)
except StopIteration:
- log.debug('No album ID found.')
+ log.debug("No album ID found.")
return None
# Is there a consensus on the MB album ID?
for other in albumids:
if other != first:
- log.debug('No album ID consensus.')
+ log.debug("No album ID consensus.")
return None
# If all album IDs are equal, look up the album.
- log.debug('Searching for discovered album ID: {0}', first)
+ log.debug("Searching for discovered album ID: {0}", first)
return hooks.album_for_mbid(first)
def _recommendation(
- results: Sequence[Union[AlbumMatch, TrackMatch]],
+ results: Sequence[Union[AlbumMatch, TrackMatch]],
) -> Recommendation:
"""Given a sorted list of AlbumMatch or TrackMatch objects, return a
recommendation based on the results' distances.
@@ -341,17 +365,19 @@ def _recommendation(
# Basic distance thresholding.
min_dist = results[0].distance
- if min_dist < config['match']['strong_rec_thresh'].as_number():
+ if min_dist < config["match"]["strong_rec_thresh"].as_number():
# Strong recommendation level.
rec = Recommendation.strong
- elif min_dist <= config['match']['medium_rec_thresh'].as_number():
+ elif min_dist <= config["match"]["medium_rec_thresh"].as_number():
# Medium recommendation level.
rec = Recommendation.medium
elif len(results) == 1:
# Only a single candidate.
rec = Recommendation.low
- elif results[1].distance - min_dist >= \
- config['match']['rec_gap_thresh'].as_number():
+ elif (
+ results[1].distance - min_dist
+ >= config["match"]["rec_gap_thresh"].as_number()
+ ):
# Gap between first two candidates is large.
rec = Recommendation.low
else:
@@ -364,15 +390,17 @@ def _recommendation(
if isinstance(results[0], hooks.AlbumMatch):
for track_dist in min_dist.tracks.values():
keys.update(list(track_dist.keys()))
- max_rec_view = config['match']['max_rec']
+ max_rec_view = config["match"]["max_rec"]
for key in keys:
if key in list(max_rec_view.keys()):
- max_rec = max_rec_view[key].as_choice({
- 'strong': Recommendation.strong,
- 'medium': Recommendation.medium,
- 'low': Recommendation.low,
- 'none': Recommendation.none,
- })
+ max_rec = max_rec_view[key].as_choice(
+ {
+ "strong": Recommendation.strong,
+ "medium": Recommendation.medium,
+ "low": Recommendation.low,
+ "none": Recommendation.none,
+ }
+ )
rec = min(rec, max_rec)
return rec
@@ -396,23 +424,24 @@ def _add_candidate(
checking the track count, ordering the items, checking for
duplicates, and calculating the distance.
"""
- log.debug('Candidate: {0} - {1} ({2})',
- info.artist, info.album, info.album_id)
+ log.debug(
+ "Candidate: {0} - {1} ({2})", info.artist, info.album, info.album_id
+ )
# Discard albums with zero tracks.
if not info.tracks:
- log.debug('No tracks.')
+ log.debug("No tracks.")
return
# Prevent duplicates.
if info.album_id and info.album_id in results:
- log.debug('Duplicate.')
+ log.debug("Duplicate.")
return
# Discard matches without required tags.
- for req_tag in cast(Sequence, config['match']['required'].as_str_seq()):
+ for req_tag in cast(Sequence, config["match"]["required"].as_str_seq()):
if getattr(info, req_tag) is None:
- log.debug('Ignored. Missing required tag: {0}', req_tag)
+ log.debug("Ignored. Missing required tag: {0}", req_tag)
return
# Find mapping between the items and the track info.
@@ -423,22 +452,23 @@ def _add_candidate(
# Skip matches with ignored penalties.
penalties = [key for key, _ in dist]
- ignored = cast(Sequence[str], config['match']['ignored'].as_str_seq())
+ ignored = cast(Sequence[str], config["match"]["ignored"].as_str_seq())
for penalty in ignored:
if penalty in penalties:
- log.debug('Ignored. Penalty: {0}', penalty)
+ log.debug("Ignored. Penalty: {0}", penalty)
return
- log.debug('Success. Distance: {0}', dist)
- results[info.album_id] = hooks.AlbumMatch(dist, info, mapping,
- extra_items, extra_tracks)
+ log.debug("Success. Distance: {0}", dist)
+ results[info.album_id] = hooks.AlbumMatch(
+ dist, info, mapping, extra_items, extra_tracks
+ )
def tag_album(
- items,
- search_artist: Optional[str] = None,
- search_album: Optional[str] = None,
- search_ids: List = [],
+ items,
+ search_artist: Optional[str] = None,
+ search_album: Optional[str] = None,
+ search_ids: List = [],
) -> Tuple[str, str, Proposal]:
"""Return a tuple of the current artist name, the current album
name, and a `Proposal` containing `AlbumMatch` candidates.
@@ -459,9 +489,9 @@ def tag_album(
"""
# Get current metadata.
likelies, consensus = current_metadata(items)
- cur_artist = cast(str, likelies['artist'])
- cur_album = cast(str, likelies['album'])
- log.debug('Tagging {0} - {1}', cur_artist, cur_album)
+ cur_artist = cast(str, likelies["artist"])
+ cur_album = cast(str, likelies["album"])
+ log.debug("Tagging {0} - {1}", cur_artist, cur_album)
# The output result, keys are the MB album ID.
candidates: Dict[Any, AlbumMatch] = {}
@@ -469,7 +499,7 @@ def tag_album(
# Search by explicit ID.
if search_ids:
for search_id in search_ids:
- log.debug('Searching for album ID: {0}', search_id)
+ log.debug("Searching for album ID: {0}", search_id)
for album_info_for_id in hooks.albums_for_id(search_id):
_add_candidate(items, candidates, album_info_for_id)
@@ -480,43 +510,46 @@ def tag_album(
if id_info:
_add_candidate(items, candidates, id_info)
rec = _recommendation(list(candidates.values()))
- log.debug('Album ID match recommendation is {0}', rec)
- if candidates and not config['import']['timid']:
+ log.debug("Album ID match recommendation is {0}", rec)
+ if candidates and not config["import"]["timid"]:
# If we have a very good MBID match, return immediately.
# Otherwise, this match will compete against metadata-based
# matches.
if rec == Recommendation.strong:
- log.debug('ID match.')
- return cur_artist, cur_album, \
- Proposal(list(candidates.values()), rec)
+ log.debug("ID match.")
+ return (
+ cur_artist,
+ cur_album,
+ Proposal(list(candidates.values()), rec),
+ )
# Search terms.
if not (search_artist and search_album):
# No explicit search terms -- use current metadata.
search_artist, search_album = cur_artist, cur_album
- log.debug('Search terms: {0} - {1}', search_artist, search_album)
+ log.debug("Search terms: {0} - {1}", search_artist, search_album)
extra_tags = None
- if config['musicbrainz']['extra_tags']:
- tag_list = config['musicbrainz']['extra_tags'].get()
+ if config["musicbrainz"]["extra_tags"]:
+ tag_list = config["musicbrainz"]["extra_tags"].get()
extra_tags = {k: v for (k, v) in likelies.items() if k in tag_list}
- log.debug('Additional search terms: {0}', extra_tags)
+ log.debug("Additional search terms: {0}", extra_tags)
# Is this album likely to be a "various artist" release?
- va_likely = ((not consensus['artist']) or
- (search_artist.lower() in VA_ARTISTS) or
- any(item.comp for item in items))
- log.debug('Album might be VA: {0}', va_likely)
+ va_likely = (
+ (not consensus["artist"])
+ or (search_artist.lower() in VA_ARTISTS)
+ or any(item.comp for item in items)
+ )
+ log.debug("Album might be VA: {0}", va_likely)
# Get the results from the data sources.
- for matched_candidate in hooks.album_candidates(items,
- search_artist,
- search_album,
- va_likely,
- extra_tags):
+ for matched_candidate in hooks.album_candidates(
+ items, search_artist, search_album, va_likely, extra_tags
+ ):
_add_candidate(items, candidates, matched_candidate)
- log.debug('Evaluating {0} candidates.', len(candidates))
+ log.debug("Evaluating {0} candidates.", len(candidates))
# Sort and get the recommendation.
candidates_sorted = _sort_candidates(candidates.values())
rec = _recommendation(candidates_sorted)
@@ -524,10 +557,10 @@ def tag_album(
def tag_item(
- item,
- search_artist: Optional[str] = None,
- search_title: Optional[str] = None,
- search_ids: List = [],
+ item,
+ search_artist: Optional[str] = None,
+ search_title: Optional[str] = None,
+ search_ids: List = [],
) -> Proposal:
"""Find metadata for a single track. Return a `Proposal` consisting
of `TrackMatch` objects.
@@ -546,16 +579,19 @@ def tag_item(
trackids = search_ids or [t for t in [item.mb_trackid] if t]
if trackids:
for trackid in trackids:
- log.debug('Searching for track ID: {0}', trackid)
+ log.debug("Searching for track ID: {0}", trackid)
for track_info in hooks.tracks_for_id(trackid):
dist = track_distance(item, track_info, incl_artist=True)
- candidates[track_info.track_id] = \
- hooks.TrackMatch(dist, track_info)
+ candidates[track_info.track_id] = hooks.TrackMatch(
+ dist, track_info
+ )
# If this is a good match, then don't keep searching.
rec = _recommendation(_sort_candidates(candidates.values()))
- if rec == Recommendation.strong and \
- not config['import']['timid']:
- log.debug('Track ID match.')
+ if (
+ rec == Recommendation.strong
+ and not config["import"]["timid"]
+ ):
+ log.debug("Track ID match.")
return Proposal(_sort_candidates(candidates.values()), rec)
# If we're searching by ID, don't proceed.
@@ -569,7 +605,7 @@ def tag_item(
# Search terms.
if not (search_artist and search_title):
search_artist, search_title = item.artist, item.title
- log.debug('Item search terms: {0} - {1}', search_artist, search_title)
+ log.debug("Item search terms: {0} - {1}", search_artist, search_title)
# Get and evaluate candidate metadata.
for track_info in hooks.item_candidates(item, search_artist, search_title):
@@ -577,7 +613,7 @@ def tag_item(
candidates[track_info.track_id] = hooks.TrackMatch(dist, track_info)
# Sort by distance and return with recommendation.
- log.debug('Found {0} candidates.', len(candidates))
+ log.debug("Found {0} candidates.", len(candidates))
candidates_sorted = _sort_candidates(candidates.values())
rec = _recommendation(candidates_sorted)
return Proposal(candidates_sorted, rec)
diff --git a/beets/autotag/mb.py b/beets/autotag/mb.py
index 11a476e49c..0bd47f1e6e 100644
--- a/beets/autotag/mb.py
+++ b/beets/autotag/mb.py
@@ -15,41 +15,41 @@
"""Searches for albums in the MusicBrainz database.
"""
from __future__ import annotations
-from typing import Any, List, Sequence, Tuple, Dict, Optional, Iterator, cast
-import musicbrainzngs
import re
import traceback
-
-from beets import logging
-from beets import plugins
-import beets.autotag.hooks
-import beets
-from beets import util
-from beets import config
from collections import Counter
+from typing import Any, Dict, Iterator, List, Optional, Sequence, Tuple, cast
from urllib.parse import urljoin
-from beets.util.id_extractors import extract_discogs_id_regex, \
- spotify_id_regex, deezer_id_regex, beatport_id_regex
+import musicbrainzngs
+
+import beets
+import beets.autotag.hooks
+from beets import config, logging, plugins, util
from beets.plugins import MetadataSourcePlugin
+from beets.util.id_extractors import (
+ beatport_id_regex,
+ deezer_id_regex,
+ extract_discogs_id_regex,
+ spotify_id_regex,
+)
-VARIOUS_ARTISTS_ID = '89ad4ac3-39f7-470e-963a-56509c546377'
+VARIOUS_ARTISTS_ID = "89ad4ac3-39f7-470e-963a-56509c546377"
-BASE_URL = 'https://musicbrainz.org/'
+BASE_URL = "https://musicbrainz.org/"
-SKIPPED_TRACKS = ['[data track]']
+SKIPPED_TRACKS = ["[data track]"]
FIELDS_TO_MB_KEYS = {
- 'catalognum': 'catno',
- 'country': 'country',
- 'label': 'label',
- 'media': 'format',
- 'year': 'date',
+ "catalognum": "catno",
+ "country": "country",
+ "label": "label",
+ "media": "format",
+ "year": "date",
}
-musicbrainzngs.set_useragent('beets', beets.__version__,
- 'https://beets.io/')
+musicbrainzngs.set_useragent("beets", beets.__version__, "https://beets.io/")
class MusicBrainzAPIError(util.HumanReadableException):
@@ -60,56 +60,72 @@ class MusicBrainzAPIError(util.HumanReadableException):
def __init__(self, reason, verb, query, tb=None):
self.query = query
if isinstance(reason, musicbrainzngs.WebServiceError):
- reason = 'MusicBrainz not reachable'
+ reason = "MusicBrainz not reachable"
super().__init__(reason, verb, tb)
def get_message(self):
- return '{} in {} with query {}'.format(
+ return "{} in {} with query {}".format(
self._reasonstr(), self.verb, repr(self.query)
)
-log = logging.getLogger('beets')
-
-RELEASE_INCLUDES = ['artists', 'media', 'recordings', 'release-groups',
- 'labels', 'artist-credits', 'aliases',
- 'recording-level-rels', 'work-rels',
- 'work-level-rels', 'artist-rels', 'isrcs',
- 'url-rels', 'release-rels']
-BROWSE_INCLUDES = ['artist-credits', 'work-rels',
- 'artist-rels', 'recording-rels', 'release-rels']
-if "work-level-rels" in musicbrainzngs.VALID_BROWSE_INCLUDES['recording']:
+log = logging.getLogger("beets")
+
+RELEASE_INCLUDES = [
+ "artists",
+ "media",
+ "recordings",
+ "release-groups",
+ "labels",
+ "artist-credits",
+ "aliases",
+ "recording-level-rels",
+ "work-rels",
+ "work-level-rels",
+ "artist-rels",
+ "isrcs",
+ "url-rels",
+ "release-rels",
+]
+BROWSE_INCLUDES = [
+ "artist-credits",
+ "work-rels",
+ "artist-rels",
+ "recording-rels",
+ "release-rels",
+]
+if "work-level-rels" in musicbrainzngs.VALID_BROWSE_INCLUDES["recording"]:
BROWSE_INCLUDES.append("work-level-rels")
BROWSE_CHUNKSIZE = 100
BROWSE_MAXTRACKS = 500
-TRACK_INCLUDES = ['artists', 'aliases', 'isrcs']
-if 'work-level-rels' in musicbrainzngs.VALID_INCLUDES['recording']:
- TRACK_INCLUDES += ['work-level-rels', 'artist-rels']
-if 'genres' in musicbrainzngs.VALID_INCLUDES['recording']:
- RELEASE_INCLUDES += ['genres']
+TRACK_INCLUDES = ["artists", "aliases", "isrcs"]
+if "work-level-rels" in musicbrainzngs.VALID_INCLUDES["recording"]:
+ TRACK_INCLUDES += ["work-level-rels", "artist-rels"]
+if "genres" in musicbrainzngs.VALID_INCLUDES["recording"]:
+ RELEASE_INCLUDES += ["genres"]
def track_url(trackid: str) -> str:
- return urljoin(BASE_URL, 'recording/' + trackid)
+ return urljoin(BASE_URL, "recording/" + trackid)
def album_url(albumid: str) -> str:
- return urljoin(BASE_URL, 'release/' + albumid)
+ return urljoin(BASE_URL, "release/" + albumid)
def configure():
"""Set up the python-musicbrainz-ngs module according to settings
from the beets configuration. This should be called at startup.
"""
- hostname = config['musicbrainz']['host'].as_str()
- https = config['musicbrainz']['https'].get(bool)
+ hostname = config["musicbrainz"]["host"].as_str()
+ https = config["musicbrainz"]["https"].get(bool)
# Only call set_hostname when a custom server is configured. Since
# musicbrainz-ngs connects to musicbrainz.org with HTTPS by default
if hostname != "musicbrainz.org":
musicbrainzngs.set_hostname(hostname, https)
musicbrainzngs.set_rate_limit(
- config['musicbrainz']['ratelimit_interval'].as_number(),
- config['musicbrainz']['ratelimit'].get(int),
+ config["musicbrainz"]["ratelimit_interval"].as_number(),
+ config["musicbrainz"]["ratelimit"].get(int),
)
@@ -122,20 +138,23 @@ def _preferred_alias(aliases: List):
return
# Only consider aliases that have locales set.
- aliases = [a for a in aliases if 'locale' in a]
+ aliases = [a for a in aliases if "locale" in a]
# Get any ignored alias types and lower case them to prevent case issues
- ignored_alias_types = config['import']['ignored_alias_types'].as_str_seq()
+ ignored_alias_types = config["import"]["ignored_alias_types"].as_str_seq()
ignored_alias_types = [a.lower() for a in ignored_alias_types]
# Search configured locales in order.
- for locale in config['import']['languages'].as_str_seq():
+ for locale in config["import"]["languages"].as_str_seq():
# Find matching primary aliases for this locale that are not
# being ignored
matches = []
for a in aliases:
- if a['locale'] == locale and 'primary' in a and \
- a.get('type', '').lower() not in ignored_alias_types:
+ if (
+ a["locale"] == locale
+ and "primary" in a
+ and a.get("type", "").lower() not in ignored_alias_types
+ ):
matches.append(a)
# Skip to the next locale if we have no matches
@@ -150,25 +169,22 @@ def _preferred_release_event(release: Dict[str, Any]) -> Tuple[str, str]:
event as a tuple of (country, release_date). Fall back to the
default release event if a preferred event is not found.
"""
- countries = config['match']['preferred']['countries'].as_str_seq()
+ countries = config["match"]["preferred"]["countries"].as_str_seq()
countries = cast(Sequence, countries)
for country in countries:
- for event in release.get('release-event-list', {}):
+ for event in release.get("release-event-list", {}):
try:
- if country in event['area']['iso-3166-1-code-list']:
- return country, event['date']
+ if country in event["area"]["iso-3166-1-code-list"]:
+ return country, event["date"]
except KeyError:
pass
- return (
- cast(str, release.get('country')),
- cast(str, release.get('date'))
- )
+ return (cast(str, release.get("country")), cast(str, release.get("date")))
def _multi_artist_credit(
- credit: List[Dict], include_join_phrase: bool
+ credit: List[Dict], include_join_phrase: bool
) -> Tuple[List[str], List[str], List[str]]:
"""Given a list representing an ``artist-credit`` block, accumulate
data into a triple of joined artist name lists: canonical, sort, and
@@ -186,26 +202,26 @@ def _multi_artist_credit(
artist_sort_parts.append(el)
else:
- alias = _preferred_alias(el['artist'].get('alias-list', ()))
+ alias = _preferred_alias(el["artist"].get("alias-list", ()))
# An artist.
if alias:
- cur_artist_name = alias['alias']
+ cur_artist_name = alias["alias"]
else:
- cur_artist_name = el['artist']['name']
+ cur_artist_name = el["artist"]["name"]
artist_parts.append(cur_artist_name)
# Artist sort name.
if alias:
- artist_sort_parts.append(alias['sort-name'])
- elif 'sort-name' in el['artist']:
- artist_sort_parts.append(el['artist']['sort-name'])
+ artist_sort_parts.append(alias["sort-name"])
+ elif "sort-name" in el["artist"]:
+ artist_sort_parts.append(el["artist"]["sort-name"])
else:
artist_sort_parts.append(cur_artist_name)
# Artist credit.
- if 'name' in el:
- artist_credit_parts.append(el['name'])
+ if "name" in el:
+ artist_credit_parts.append(el["name"])
else:
artist_credit_parts.append(cur_artist_name)
@@ -221,15 +237,13 @@ def _flatten_artist_credit(credit: List[Dict]) -> Tuple[str, str, str]:
data into a triple of joined artist name strings: canonical, sort, and
credit.
"""
- artist_parts, artist_sort_parts, artist_credit_parts = \
- _multi_artist_credit(
- credit,
- include_join_phrase=True
- )
+ artist_parts, artist_sort_parts, artist_credit_parts = _multi_artist_credit(
+ credit, include_join_phrase=True
+ )
return (
- ''.join(artist_parts),
- ''.join(artist_sort_parts),
- ''.join(artist_credit_parts),
+ "".join(artist_parts),
+ "".join(artist_sort_parts),
+ "".join(artist_credit_parts),
)
@@ -241,7 +255,7 @@ def _artist_ids(credit: List[Dict]) -> List[str]:
artist_ids: List[str] = []
for el in credit:
if isinstance(el, dict):
- artist_ids.append(el['artist']['id'])
+ artist_ids.append(el["artist"]["id"])
return artist_ids
@@ -253,18 +267,18 @@ def _get_related_artist_names(relations, relation_type):
related_artists = []
for relation in relations:
- if relation['type'] == relation_type:
- related_artists.append(relation['artist']['name'])
+ if relation["type"] == relation_type:
+ related_artists.append(relation["artist"]["name"])
- return ', '.join(related_artists)
+ return ", ".join(related_artists)
def track_info(
- recording: Dict,
- index: Optional[int] = None,
- medium: Optional[int] = None,
- medium_index: Optional[int] = None,
- medium_total: Optional[int] = None,
+ recording: Dict,
+ index: Optional[int] = None,
+ medium: Optional[int] = None,
+ medium_index: Optional[int] = None,
+ medium_total: Optional[int] = None,
) -> beets.autotag.hooks.TrackInfo:
"""Translates a MusicBrainz recording result dictionary into a beets
``TrackInfo`` object. Three parameters are optional and are used
@@ -274,81 +288,86 @@ def track_info(
the number of tracks on the medium. Each number is a 1-based index.
"""
info = beets.autotag.hooks.TrackInfo(
- title=recording['title'],
- track_id=recording['id'],
+ title=recording["title"],
+ track_id=recording["id"],
index=index,
medium=medium,
medium_index=medium_index,
medium_total=medium_total,
- data_source='MusicBrainz',
- data_url=track_url(recording['id']),
+ data_source="MusicBrainz",
+ data_url=track_url(recording["id"]),
)
- if recording.get('artist-credit'):
+ if recording.get("artist-credit"):
# Get the artist names.
- info.artist, info.artist_sort, info.artist_credit = \
- _flatten_artist_credit(recording['artist-credit'])
-
- info.artists, info.artists_sort, info.artists_credit = \
- _multi_artist_credit(
- recording['artist-credit'], include_join_phrase=False
- )
+ (
+ info.artist,
+ info.artist_sort,
+ info.artist_credit,
+ ) = _flatten_artist_credit(recording["artist-credit"])
+
+ (
+ info.artists,
+ info.artists_sort,
+ info.artists_credit,
+ ) = _multi_artist_credit(
+ recording["artist-credit"], include_join_phrase=False
+ )
- info.artists_ids = _artist_ids(recording['artist-credit'])
+ info.artists_ids = _artist_ids(recording["artist-credit"])
info.artist_id = info.artists_ids[0]
- if recording.get('artist-relation-list'):
+ if recording.get("artist-relation-list"):
info.remixer = _get_related_artist_names(
- recording['artist-relation-list'],
- relation_type='remixer'
+ recording["artist-relation-list"], relation_type="remixer"
)
- if recording.get('length'):
- info.length = int(recording['length']) / 1000.0
+ if recording.get("length"):
+ info.length = int(recording["length"]) / 1000.0
- info.trackdisambig = recording.get('disambiguation')
+ info.trackdisambig = recording.get("disambiguation")
- if recording.get('isrc-list'):
- info.isrc = ';'.join(recording['isrc-list'])
+ if recording.get("isrc-list"):
+ info.isrc = ";".join(recording["isrc-list"])
lyricist = []
composer = []
composer_sort = []
- for work_relation in recording.get('work-relation-list', ()):
- if work_relation['type'] != 'performance':
+ for work_relation in recording.get("work-relation-list", ()):
+ if work_relation["type"] != "performance":
continue
- info.work = work_relation['work']['title']
- info.mb_workid = work_relation['work']['id']
- if 'disambiguation' in work_relation['work']:
- info.work_disambig = work_relation['work']['disambiguation']
-
- for artist_relation in work_relation['work'].get(
- 'artist-relation-list', ()):
- if 'type' in artist_relation:
- type = artist_relation['type']
- if type == 'lyricist':
- lyricist.append(artist_relation['artist']['name'])
- elif type == 'composer':
- composer.append(artist_relation['artist']['name'])
- composer_sort.append(
- artist_relation['artist']['sort-name'])
+ info.work = work_relation["work"]["title"]
+ info.mb_workid = work_relation["work"]["id"]
+ if "disambiguation" in work_relation["work"]:
+ info.work_disambig = work_relation["work"]["disambiguation"]
+
+ for artist_relation in work_relation["work"].get(
+ "artist-relation-list", ()
+ ):
+ if "type" in artist_relation:
+ type = artist_relation["type"]
+ if type == "lyricist":
+ lyricist.append(artist_relation["artist"]["name"])
+ elif type == "composer":
+ composer.append(artist_relation["artist"]["name"])
+ composer_sort.append(artist_relation["artist"]["sort-name"])
if lyricist:
- info.lyricist = ', '.join(lyricist)
+ info.lyricist = ", ".join(lyricist)
if composer:
- info.composer = ', '.join(composer)
- info.composer_sort = ', '.join(composer_sort)
+ info.composer = ", ".join(composer)
+ info.composer_sort = ", ".join(composer_sort)
arranger = []
- for artist_relation in recording.get('artist-relation-list', ()):
- if 'type' in artist_relation:
- type = artist_relation['type']
- if type == 'arranger':
- arranger.append(artist_relation['artist']['name'])
+ for artist_relation in recording.get("artist-relation-list", ()):
+ if "type" in artist_relation:
+ type = artist_relation["type"]
+ if type == "arranger":
+ arranger.append(artist_relation["artist"]["name"])
if arranger:
- info.arranger = ', '.join(arranger)
+ info.arranger = ", ".join(arranger)
# Supplementary fields provided by plugins
- extra_trackdatas = plugins.send('mb_track_extract', data=recording)
+ extra_trackdatas = plugins.send("mb_track_extract", data=recording)
for extra_trackdata in extra_trackdatas:
info.update(extra_trackdata)
@@ -357,17 +376,17 @@ def track_info(
def _set_date_str(
- info: beets.autotag.hooks.AlbumInfo,
- date_str: str,
- original: bool = False,
+ info: beets.autotag.hooks.AlbumInfo,
+ date_str: str,
+ original: bool = False,
):
"""Given a (possibly partial) YYYY-MM-DD string and an AlbumInfo
object, set the object's release date fields appropriately. If
`original`, then set the original_year, etc., fields.
"""
if date_str:
- date_parts = date_str.split('-')
- for key in ('year', 'month', 'day'):
+ date_parts = date_str.split("-")
+ for key in ("year", "month", "day"):
if date_parts:
date_part = date_parts.pop(0)
try:
@@ -376,7 +395,7 @@ def _set_date_str(
continue
if original:
- key = 'original_' + key
+ key = "original_" + key
setattr(info, key, date_num)
@@ -385,154 +404,174 @@ def album_info(release: Dict) -> beets.autotag.hooks.AlbumInfo:
AlbumInfo object containing the interesting data about that release.
"""
# Get artist name using join phrases.
- artist_name, artist_sort_name, artist_credit_name = \
- _flatten_artist_credit(release['artist-credit'])
+ artist_name, artist_sort_name, artist_credit_name = _flatten_artist_credit(
+ release["artist-credit"]
+ )
- artists_names, artists_sort_names, artists_credit_names = \
- _multi_artist_credit(
- release['artist-credit'], include_join_phrase=False
- )
+ (
+ artists_names,
+ artists_sort_names,
+ artists_credit_names,
+ ) = _multi_artist_credit(
+ release["artist-credit"], include_join_phrase=False
+ )
- ntracks = sum(len(m['track-list']) for m in release['medium-list'])
+ ntracks = sum(len(m["track-list"]) for m in release["medium-list"])
# The MusicBrainz API omits 'artist-relation-list' and 'work-relation-list'
# when the release has more than 500 tracks. So we use browse_recordings
# on chunks of tracks to recover the same information in this case.
if ntracks > BROWSE_MAXTRACKS:
- log.debug('Album {} has too many tracks', release['id'])
+ log.debug("Album {} has too many tracks", release["id"])
recording_list = []
for i in range(0, ntracks, BROWSE_CHUNKSIZE):
- log.debug('Retrieving tracks starting at {}', i)
- recording_list.extend(musicbrainzngs.browse_recordings(
- release=release['id'], limit=BROWSE_CHUNKSIZE,
- includes=BROWSE_INCLUDES,
- offset=i)['recording-list'])
- track_map = {r['id']: r for r in recording_list}
- for medium in release['medium-list']:
- for recording in medium['track-list']:
- recording_info = track_map[recording['recording']['id']]
- recording['recording'] = recording_info
+ log.debug("Retrieving tracks starting at {}", i)
+ recording_list.extend(
+ musicbrainzngs.browse_recordings(
+ release=release["id"],
+ limit=BROWSE_CHUNKSIZE,
+ includes=BROWSE_INCLUDES,
+ offset=i,
+ )["recording-list"]
+ )
+ track_map = {r["id"]: r for r in recording_list}
+ for medium in release["medium-list"]:
+ for recording in medium["track-list"]:
+ recording_info = track_map[recording["recording"]["id"]]
+ recording["recording"] = recording_info
# Basic info.
track_infos = []
index = 0
- for medium in release['medium-list']:
- disctitle = medium.get('title')
- format = medium.get('format')
+ for medium in release["medium-list"]:
+ disctitle = medium.get("title")
+ format = medium.get("format")
- if format in config['match']['ignored_media'].as_str_seq():
+ if format in config["match"]["ignored_media"].as_str_seq():
continue
- all_tracks = medium['track-list']
- if ('data-track-list' in medium
- and not config['match']['ignore_data_tracks']):
- all_tracks += medium['data-track-list']
+ all_tracks = medium["track-list"]
+ if (
+ "data-track-list" in medium
+ and not config["match"]["ignore_data_tracks"]
+ ):
+ all_tracks += medium["data-track-list"]
track_count = len(all_tracks)
- if 'pregap' in medium:
- all_tracks.insert(0, medium['pregap'])
+ if "pregap" in medium:
+ all_tracks.insert(0, medium["pregap"])
for track in all_tracks:
-
- if ('title' in track['recording'] and
- track['recording']['title'] in SKIPPED_TRACKS):
+ if (
+ "title" in track["recording"]
+ and track["recording"]["title"] in SKIPPED_TRACKS
+ ):
continue
- if ('video' in track['recording'] and
- track['recording']['video'] == 'true' and
- config['match']['ignore_video_tracks']):
+ if (
+ "video" in track["recording"]
+ and track["recording"]["video"] == "true"
+ and config["match"]["ignore_video_tracks"]
+ ):
continue
# Basic information from the recording.
index += 1
ti = track_info(
- track['recording'],
+ track["recording"],
index,
- int(medium['position']),
- int(track['position']),
+ int(medium["position"]),
+ int(track["position"]),
track_count,
)
- ti.release_track_id = track['id']
+ ti.release_track_id = track["id"]
ti.disctitle = disctitle
ti.media = format
- ti.track_alt = track['number']
+ ti.track_alt = track["number"]
# Prefer track data, where present, over recording data.
- if track.get('title'):
- ti.title = track['title']
- if track.get('artist-credit'):
+ if track.get("title"):
+ ti.title = track["title"]
+ if track.get("artist-credit"):
# Get the artist names.
- ti.artist, ti.artist_sort, ti.artist_credit = \
- _flatten_artist_credit(track['artist-credit'])
-
- ti.artists, ti.artists_sort, ti.artists_credit = \
- _multi_artist_credit(
- track['artist-credit'], include_join_phrase=False
- )
-
- ti.artists_ids = _artist_ids(track['artist-credit'])
+ (
+ ti.artist,
+ ti.artist_sort,
+ ti.artist_credit,
+ ) = _flatten_artist_credit(track["artist-credit"])
+
+ (
+ ti.artists,
+ ti.artists_sort,
+ ti.artists_credit,
+ ) = _multi_artist_credit(
+ track["artist-credit"], include_join_phrase=False
+ )
+
+ ti.artists_ids = _artist_ids(track["artist-credit"])
ti.artist_id = ti.artists_ids[0]
- if track.get('length'):
- ti.length = int(track['length']) / (1000.0)
+ if track.get("length"):
+ ti.length = int(track["length"]) / (1000.0)
track_infos.append(ti)
- album_artist_ids = _artist_ids(release['artist-credit'])
+ album_artist_ids = _artist_ids(release["artist-credit"])
info = beets.autotag.hooks.AlbumInfo(
- album=release['title'],
- album_id=release['id'],
+ album=release["title"],
+ album_id=release["id"],
artist=artist_name,
artist_id=album_artist_ids[0],
artists=artists_names,
artists_ids=album_artist_ids,
tracks=track_infos,
- mediums=len(release['medium-list']),
+ mediums=len(release["medium-list"]),
artist_sort=artist_sort_name,
artists_sort=artists_sort_names,
artist_credit=artist_credit_name,
artists_credit=artists_credit_names,
- data_source='MusicBrainz',
- data_url=album_url(release['id']),
+ data_source="MusicBrainz",
+ data_url=album_url(release["id"]),
)
info.va = info.artist_id == VARIOUS_ARTISTS_ID
if info.va:
- info.artist = config['va_name'].as_str()
- info.asin = release.get('asin')
- info.releasegroup_id = release['release-group']['id']
- info.albumstatus = release.get('status')
+ info.artist = config["va_name"].as_str()
+ info.asin = release.get("asin")
+ info.releasegroup_id = release["release-group"]["id"]
+ info.albumstatus = release.get("status")
- if release['release-group'].get('title'):
- info.release_group_title = release['release-group'].get('title')
+ if release["release-group"].get("title"):
+ info.release_group_title = release["release-group"].get("title")
# Get the disambiguation strings at the release and release group level.
- if release['release-group'].get('disambiguation'):
- info.releasegroupdisambig = \
- release['release-group'].get('disambiguation')
- if release.get('disambiguation'):
- info.albumdisambig = release.get('disambiguation')
+ if release["release-group"].get("disambiguation"):
+ info.releasegroupdisambig = release["release-group"].get(
+ "disambiguation"
+ )
+ if release.get("disambiguation"):
+ info.albumdisambig = release.get("disambiguation")
# Get the "classic" Release type. This data comes from a legacy API
# feature before MusicBrainz supported multiple release types.
- if 'type' in release['release-group']:
- reltype = release['release-group']['type']
+ if "type" in release["release-group"]:
+ reltype = release["release-group"]["type"]
if reltype:
info.albumtype = reltype.lower()
# Set the new-style "primary" and "secondary" release types.
albumtypes = []
- if 'primary-type' in release['release-group']:
- rel_primarytype = release['release-group']['primary-type']
+ if "primary-type" in release["release-group"]:
+ rel_primarytype = release["release-group"]["primary-type"]
if rel_primarytype:
albumtypes.append(rel_primarytype.lower())
- if 'secondary-type-list' in release['release-group']:
- if release['release-group']['secondary-type-list']:
- for sec_type in release['release-group']['secondary-type-list']:
+ if "secondary-type-list" in release["release-group"]:
+ if release["release-group"]["secondary-type-list"]:
+ for sec_type in release["release-group"]["secondary-type-list"]:
albumtypes.append(sec_type.lower())
info.albumtypes = albumtypes
# Release events.
info.country, release_date = _preferred_release_event(release)
- release_group_date = release['release-group'].get('first-release-date')
+ release_group_date = release["release-group"].get("first-release-date")
if not release_date:
# Fall back if release-specific date is not available.
release_date = release_group_date
@@ -540,79 +579,80 @@ def album_info(release: Dict) -> beets.autotag.hooks.AlbumInfo:
_set_date_str(info, release_group_date, True)
# Label name.
- if release.get('label-info-list'):
- label_info = release['label-info-list'][0]
- if label_info.get('label'):
- label = label_info['label']['name']
- if label != '[no label]':
+ if release.get("label-info-list"):
+ label_info = release["label-info-list"][0]
+ if label_info.get("label"):
+ label = label_info["label"]["name"]
+ if label != "[no label]":
info.label = label
- info.catalognum = label_info.get('catalog-number')
+ info.catalognum = label_info.get("catalog-number")
# Text representation data.
- if release.get('text-representation'):
- rep = release['text-representation']
- info.script = rep.get('script')
- info.language = rep.get('language')
+ if release.get("text-representation"):
+ rep = release["text-representation"]
+ info.script = rep.get("script")
+ info.language = rep.get("language")
# Media (format).
- if release['medium-list']:
- first_medium = release['medium-list'][0]
- info.media = first_medium.get('format')
+ if release["medium-list"]:
+ first_medium = release["medium-list"][0]
+ info.media = first_medium.get("format")
- if config['musicbrainz']['genres']:
+ if config["musicbrainz"]["genres"]:
sources = [
- release['release-group'].get('genre-list', []),
- release.get('genre-list', []),
+ release["release-group"].get("genre-list", []),
+ release.get("genre-list", []),
]
genres: Counter[str] = Counter()
for source in sources:
for genreitem in source:
- genres[genreitem['name']] += int(genreitem['count'])
- info.genre = '; '.join(
- genre for genre, _count
- in sorted(genres.items(), key=lambda g: -g[1])
+ genres[genreitem["name"]] += int(genreitem["count"])
+ info.genre = "; ".join(
+ genre
+ for genre, _count in sorted(genres.items(), key=lambda g: -g[1])
)
# We might find links to external sources (Discogs, Bandcamp, ...)
- if (any(config['musicbrainz']['external_ids'].get().values())
- and release.get('url-relation-list')):
+ if any(
+ config["musicbrainz"]["external_ids"].get().values()
+ ) and release.get("url-relation-list"):
discogs_url, bandcamp_url, spotify_url = None, None, None
deezer_url, beatport_url, tidal_url = None, None, None
fetch_discogs, fetch_bandcamp, fetch_spotify = False, False, False
fetch_deezer, fetch_beatport, fetch_tidal = False, False, False
- if config['musicbrainz']['external_ids']['discogs'].get():
+ if config["musicbrainz"]["external_ids"]["discogs"].get():
fetch_discogs = True
- if config['musicbrainz']['external_ids']['bandcamp'].get():
+ if config["musicbrainz"]["external_ids"]["bandcamp"].get():
fetch_bandcamp = True
- if config['musicbrainz']['external_ids']['spotify'].get():
+ if config["musicbrainz"]["external_ids"]["spotify"].get():
fetch_spotify = True
- if config['musicbrainz']['external_ids']['deezer'].get():
+ if config["musicbrainz"]["external_ids"]["deezer"].get():
fetch_deezer = True
- if config['musicbrainz']['external_ids']['beatport'].get():
+ if config["musicbrainz"]["external_ids"]["beatport"].get():
fetch_beatport = True
- if config['musicbrainz']['external_ids']['tidal'].get():
+ if config["musicbrainz"]["external_ids"]["tidal"].get():
fetch_tidal = True
- for url in release['url-relation-list']:
- if fetch_discogs and url['type'] == 'discogs':
- log.debug('Found link to Discogs release via MusicBrainz')
- discogs_url = url['target']
- if fetch_bandcamp and 'bandcamp.com' in url['target']:
- log.debug('Found link to Bandcamp release via MusicBrainz')
- bandcamp_url = url['target']
- if fetch_spotify and 'spotify.com' in url['target']:
- log.debug('Found link to Spotify album via MusicBrainz')
- spotify_url = url['target']
- if fetch_deezer and 'deezer.com' in url['target']:
- log.debug('Found link to Deezer album via MusicBrainz')
- deezer_url = url['target']
- if fetch_beatport and 'beatport.com' in url['target']:
- log.debug('Found link to Beatport release via MusicBrainz')
- beatport_url = url['target']
- if fetch_tidal and 'tidal.com' in url['target']:
- log.debug('Found link to Tidal release via MusicBrainz')
- tidal_url = url['target']
+ for url in release["url-relation-list"]:
+ if fetch_discogs and url["type"] == "discogs":
+ log.debug("Found link to Discogs release via MusicBrainz")
+ discogs_url = url["target"]
+ if fetch_bandcamp and "bandcamp.com" in url["target"]:
+ log.debug("Found link to Bandcamp release via MusicBrainz")
+ bandcamp_url = url["target"]
+ if fetch_spotify and "spotify.com" in url["target"]:
+ log.debug("Found link to Spotify album via MusicBrainz")
+ spotify_url = url["target"]
+ if fetch_deezer and "deezer.com" in url["target"]:
+ log.debug("Found link to Deezer album via MusicBrainz")
+ deezer_url = url["target"]
+ if fetch_beatport and "beatport.com" in url["target"]:
+ log.debug("Found link to Beatport release via MusicBrainz")
+ beatport_url = url["target"]
+ if fetch_tidal and "tidal.com" in url["target"]:
+ log.debug("Found link to Tidal release via MusicBrainz")
+ tidal_url = url["target"]
if discogs_url:
info.discogs_albumid = extract_discogs_id_regex(discogs_url)
@@ -620,17 +660,20 @@ def album_info(release: Dict) -> beets.autotag.hooks.AlbumInfo:
info.bandcamp_album_id = bandcamp_url
if spotify_url:
info.spotify_album_id = MetadataSourcePlugin._get_id(
- 'album', spotify_url, spotify_id_regex)
+ "album", spotify_url, spotify_id_regex
+ )
if deezer_url:
info.deezer_album_id = MetadataSourcePlugin._get_id(
- 'album', deezer_url, deezer_id_regex)
+ "album", deezer_url, deezer_id_regex
+ )
if beatport_url:
info.beatport_album_id = MetadataSourcePlugin._get_id(
- 'album', beatport_url, beatport_id_regex)
+ "album", beatport_url, beatport_id_regex
+ )
if tidal_url:
- info.tidal_album_id = tidal_url.split('/')[-1]
+ info.tidal_album_id = tidal_url.split("/")[-1]
- extra_albumdatas = plugins.send('mb_album_extract', data=release)
+ extra_albumdatas = plugins.send("mb_album_extract", data=release)
for extra_albumdata in extra_albumdatas:
info.update(extra_albumdata)
@@ -639,10 +682,10 @@ def album_info(release: Dict) -> beets.autotag.hooks.AlbumInfo:
def match_album(
- artist: str,
- album: str,
- tracks: Optional[int] = None,
- extra_tags: Optional[Dict[str, Any]] = None,
+ artist: str,
+ album: str,
+ tracks: Optional[int] = None,
+ extra_tags: Optional[Dict[str, Any]] = None,
) -> Iterator[beets.autotag.hooks.AlbumInfo]:
"""Searches for a single album ("release" in MusicBrainz parlance)
and returns an iterator over AlbumInfo objects. May raise a
@@ -652,22 +695,22 @@ def match_album(
optionally, a number of tracks on the album and any other extra tags.
"""
# Build search criteria.
- criteria = {'release': album.lower().strip()}
+ criteria = {"release": album.lower().strip()}
if artist is not None:
- criteria['artist'] = artist.lower().strip()
+ criteria["artist"] = artist.lower().strip()
else:
# Various Artists search.
- criteria['arid'] = VARIOUS_ARTISTS_ID
+ criteria["arid"] = VARIOUS_ARTISTS_ID
if tracks is not None:
- criteria['tracks'] = str(tracks)
+ criteria["tracks"] = str(tracks)
# Additional search cues from existing metadata.
if extra_tags:
for tag, value in extra_tags.items():
key = FIELDS_TO_MB_KEYS[tag]
value = str(value).lower().strip()
- if key == 'catno':
- value = value.replace(' ', '')
+ if key == "catno":
+ value = value.replace(" ", "")
if value:
criteria[key] = value
@@ -676,30 +719,32 @@ def match_album(
return
try:
- log.debug('Searching for MusicBrainz releases with: {!r}', criteria)
+ log.debug("Searching for MusicBrainz releases with: {!r}", criteria)
res = musicbrainzngs.search_releases(
- limit=config['musicbrainz']['searchlimit'].get(int), **criteria)
+ limit=config["musicbrainz"]["searchlimit"].get(int), **criteria
+ )
except musicbrainzngs.MusicBrainzError as exc:
- raise MusicBrainzAPIError(exc, 'release search', criteria,
- traceback.format_exc())
- for release in res['release-list']:
+ raise MusicBrainzAPIError(
+ exc, "release search", criteria, traceback.format_exc()
+ )
+ for release in res["release-list"]:
# The search result is missing some data (namely, the tracks),
# so we just use the ID and fetch the rest of the information.
- albuminfo = album_for_id(release['id'])
+ albuminfo = album_for_id(release["id"])
if albuminfo is not None:
yield albuminfo
def match_track(
- artist: str,
- title: str,
+ artist: str,
+ title: str,
) -> Iterator[beets.autotag.hooks.TrackInfo]:
"""Searches for a single track and returns an iterable of TrackInfo
objects. May raise a MusicBrainzAPIError.
"""
criteria = {
- 'artist': artist.lower().strip(),
- 'recording': title.lower().strip(),
+ "artist": artist.lower().strip(),
+ "recording": title.lower().strip(),
}
if not any(criteria.values()):
@@ -707,11 +752,13 @@ def match_track(
try:
res = musicbrainzngs.search_recordings(
- limit=config['musicbrainz']['searchlimit'].get(int), **criteria)
+ limit=config["musicbrainz"]["searchlimit"].get(int), **criteria
+ )
except musicbrainzngs.MusicBrainzError as exc:
- raise MusicBrainzAPIError(exc, 'recording search', criteria,
- traceback.format_exc())
- for recording in res['recording-list']:
+ raise MusicBrainzAPIError(
+ exc, "recording search", criteria, traceback.format_exc()
+ )
+ for recording in res["recording-list"]:
yield track_info(recording)
@@ -720,21 +767,22 @@ def _parse_id(s: str) -> Optional[str]:
no ID can be found, return None.
"""
# Find the first thing that looks like a UUID/MBID.
- match = re.search('[a-f0-9]{8}(-[a-f0-9]{4}){3}-[a-f0-9]{12}', s)
+ match = re.search("[a-f0-9]{8}(-[a-f0-9]{4}){3}-[a-f0-9]{12}", s)
if match is not None:
return match.group() if match else None
return None
def _is_translation(r):
- _trans_key = 'transl-tracklisting'
- return r['type'] == _trans_key and r['direction'] == "backward"
+ _trans_key = "transl-tracklisting"
+ return r["type"] == _trans_key and r["direction"] == "backward"
-def _find_actual_release_from_pseudo_release(pseudo_rel: Dict) \
- -> Optional[Dict]:
+def _find_actual_release_from_pseudo_release(
+ pseudo_rel: Dict,
+) -> Optional[Dict]:
try:
- relations = pseudo_rel['release']["release-relation-list"]
+ relations = pseudo_rel["release"]["release-relation-list"]
except KeyError:
return None
@@ -744,15 +792,13 @@ def _find_actual_release_from_pseudo_release(pseudo_rel: Dict) \
if not translations:
return None
- actual_id = translations[0]['target']
+ actual_id = translations[0]["target"]
- return musicbrainzngs.get_release_by_id(actual_id,
- RELEASE_INCLUDES)
+ return musicbrainzngs.get_release_by_id(actual_id, RELEASE_INCLUDES)
def _merge_pseudo_and_actual_album(
- pseudo: beets.autotag.hooks.AlbumInfo,
- actual: beets.autotag.hooks.AlbumInfo
+ pseudo: beets.autotag.hooks.AlbumInfo, actual: beets.autotag.hooks.AlbumInfo
) -> Optional[beets.autotag.hooks.AlbumInfo]:
"""
Merges a pseudo release with its actual release.
@@ -767,22 +813,25 @@ def _merge_pseudo_and_actual_album(
hence why we did not implement that for now.
"""
merged = pseudo.copy()
- from_actual = {k: actual[k] for k in [
- "media",
- "mediums",
- "country",
- "catalognum",
- "year",
- "month",
- "day",
- "original_year",
- "original_month",
- "original_day",
- "label",
- "asin",
- "style",
- "genre"
- ]}
+ from_actual = {
+ k: actual[k]
+ for k in [
+ "media",
+ "mediums",
+ "country",
+ "catalognum",
+ "year",
+ "month",
+ "day",
+ "original_year",
+ "original_month",
+ "original_day",
+ "label",
+ "asin",
+ "style",
+ "genre",
+ ]
+ }
merged.update(from_actual)
return merged
@@ -792,34 +841,34 @@ def album_for_id(releaseid: str) -> Optional[beets.autotag.hooks.AlbumInfo]:
object or None if the album is not found. May raise a
MusicBrainzAPIError.
"""
- log.debug('Requesting MusicBrainz release {}', releaseid)
+ log.debug("Requesting MusicBrainz release {}", releaseid)
albumid = _parse_id(releaseid)
if not albumid:
- log.debug('Invalid MBID ({0}).', releaseid)
+ log.debug("Invalid MBID ({0}).", releaseid)
return None
try:
- res = musicbrainzngs.get_release_by_id(albumid,
- RELEASE_INCLUDES)
+ res = musicbrainzngs.get_release_by_id(albumid, RELEASE_INCLUDES)
# resolve linked release relations
actual_res = None
- if res['release'].get('status') == 'Pseudo-Release':
+ if res["release"].get("status") == "Pseudo-Release":
actual_res = _find_actual_release_from_pseudo_release(res)
except musicbrainzngs.ResponseError:
- log.debug('Album ID match failed.')
+ log.debug("Album ID match failed.")
return None
except musicbrainzngs.MusicBrainzError as exc:
- raise MusicBrainzAPIError(exc, 'get release by ID', albumid,
- traceback.format_exc())
+ raise MusicBrainzAPIError(
+ exc, "get release by ID", albumid, traceback.format_exc()
+ )
# release is potentially a pseudo release
- release = album_info(res['release'])
+ release = album_info(res["release"])
# should be None unless we're dealing with a pseudo release
if actual_res is not None:
- actual_release = album_info(actual_res['release'])
+ actual_release = album_info(actual_res["release"])
return _merge_pseudo_and_actual_album(release, actual_release)
else:
return release
@@ -831,14 +880,15 @@ def track_for_id(releaseid: str) -> Optional[beets.autotag.hooks.TrackInfo]:
"""
trackid = _parse_id(releaseid)
if not trackid:
- log.debug('Invalid MBID ({0}).', releaseid)
+ log.debug("Invalid MBID ({0}).", releaseid)
return None
try:
res = musicbrainzngs.get_recording_by_id(trackid, TRACK_INCLUDES)
except musicbrainzngs.ResponseError:
- log.debug('Track ID match failed.')
+ log.debug("Track ID match failed.")
return None
except musicbrainzngs.MusicBrainzError as exc:
- raise MusicBrainzAPIError(exc, 'get recording by ID', trackid,
- traceback.format_exc())
- return track_info(res['recording'])
+ raise MusicBrainzAPIError(
+ exc, "get recording by ID", trackid, traceback.format_exc()
+ )
+ return track_info(res["recording"])
diff --git a/beets/dbcore/__init__.py b/beets/dbcore/__init__.py
index 7cca828bdd..985b4eb809 100644
--- a/beets/dbcore/__init__.py
+++ b/beets/dbcore/__init__.py
@@ -16,12 +16,21 @@
Library.
"""
-from .db import Model, Database
-from .query import Query, FieldQuery, MatchQuery, NamedQuery, AndQuery, OrQuery
+from .db import Database, Model
+from .query import (
+ AndQuery,
+ FieldQuery,
+ InvalidQueryError,
+ MatchQuery,
+ NamedQuery,
+ OrQuery,
+ Query,
+)
+from .queryparse import (
+ parse_sorted_query,
+ query_from_strings,
+ sort_from_strings,
+)
from .types import Type
-from .queryparse import query_from_strings
-from .queryparse import sort_from_strings
-from .queryparse import parse_sorted_query
-from .query import InvalidQueryError
# flake8: noqa
diff --git a/beets/dbcore/db.py b/beets/dbcore/db.py
index 8635dd1c12..fd6dd0c10a 100755
--- a/beets/dbcore/db.py
+++ b/beets/dbcore/db.py
@@ -16,20 +16,20 @@
"""
from __future__ import annotations
-from abc import ABC
-import time
+
+import contextlib
import os
import re
-from collections import defaultdict
-import threading
import sqlite3
-import contextlib
+import threading
+import time
+from abc import ABC
+from collections import defaultdict
from sqlite3 import Connection
from types import TracebackType
from typing import (
Any,
Callable,
- cast,
DefaultDict,
Dict,
Generator,
@@ -45,18 +45,26 @@
Type,
TypeVar,
Union,
+ cast,
)
from unidecode import unidecode
import beets
-from beets.util import functemplate
-from beets.util import py3_path
-from . import types
-from .query import MatchQuery, NullSort, TrueQuery, AndQuery, Query, \
- FieldQuery, Sort, FieldSort
+from beets.util import functemplate, py3_path
from ..util.functemplate import Template
+from . import types
+from .query import (
+ AndQuery,
+ FieldQuery,
+ FieldSort,
+ MatchQuery,
+ NullSort,
+ Query,
+ Sort,
+ TrueQuery,
+)
class DBAccessError(Exception):
@@ -82,13 +90,13 @@ class FormattedMapping(Mapping[str, str]):
are replaced.
"""
- ALL_KEYS = '*'
+ ALL_KEYS = "*"
def __init__(
- self,
- model: Model,
- included_keys: str = ALL_KEYS,
- for_path: bool = False,
+ self,
+ model: Model,
+ included_keys: str = ALL_KEYS,
+ for_path: bool = False,
):
self.for_path = for_path
self.model = model
@@ -117,8 +125,7 @@ def get( # type: ignore
key: str,
default: Optional[str] = None,
) -> str:
- """Similar to Mapping.get(key, default), but always formats to str.
- """
+ """Similar to Mapping.get(key, default), but always formats to str."""
if default is None:
default = self.model._type(key).format(None)
return super().get(key, default)
@@ -126,14 +133,14 @@ def get( # type: ignore
def _get_formatted(self, model: Model, key: str) -> str:
value = model._type(key).format(model.get(key))
if isinstance(value, bytes):
- value = value.decode('utf-8', 'ignore')
+ value = value.decode("utf-8", "ignore")
if self.for_path:
- sep_repl = cast(str, beets.config['path_sep_replace'].as_str())
- sep_drive = cast(str, beets.config['drive_sep_replace'].as_str())
+ sep_repl = cast(str, beets.config["path_sep_replace"].as_str())
+ sep_drive = cast(str, beets.config["drive_sep_replace"].as_str())
- if re.match(r'^\w:', value):
- value = re.sub(r'(?<=^\w):', sep_drive, value)
+ if re.match(r"^\w:", value):
+ value = re.sub(r"(?<=^\w):", sep_drive, value)
for sep in (os.path.sep, os.path.altsep):
if sep:
@@ -152,30 +159,25 @@ def _get_formatted(self, model: Model, key: str) -> str:
# `LazyConvertDict` becomes safe during iteration. Some code does in fact rely
# on this.
class LazyConvertDict:
- """Lazily convert types for attributes fetched from the database
- """
+ """Lazily convert types for attributes fetched from the database"""
- def __init__(self, model_cls: 'Model'):
- """Initialize the object empty
- """
+ def __init__(self, model_cls: "Model"):
+ """Initialize the object empty"""
# FIXME: Dict[str, SQLiteType]
self._data: Dict[str, Any] = {}
self.model_cls = model_cls
self._converted: Dict[str, Any] = {}
def init(self, data: Dict[str, Any]):
- """Set the base data that should be lazily converted
- """
+ """Set the base data that should be lazily converted"""
self._data = data
def _convert(self, key: str, value: Any):
- """Convert the attribute type according to the SQL type
- """
+ """Convert the attribute type according to the SQL type"""
return self.model_cls._type(key).from_sql(value)
def __setitem__(self, key: str, value: Any):
- """Set an attribute value, assume it's already converted
- """
+ """Set an attribute value, assume it's already converted"""
self._converted[key] = value
def __getitem__(self, key: str) -> Any:
@@ -190,21 +192,18 @@ def __getitem__(self, key: str) -> Any:
return value
def __delitem__(self, key: str):
- """Delete both converted and base data
- """
+ """Delete both converted and base data"""
if key in self._converted:
del self._converted[key]
if key in self._data:
del self._data[key]
def keys(self) -> List[str]:
- """Get a list of available field names for this object.
- """
+ """Get a list of available field names for this object."""
return list(self._converted.keys()) + list(self._data.keys())
def copy(self) -> LazyConvertDict:
- """Create a copy of the object.
- """
+ """Create a copy of the object."""
new = self.__class__(self.model_cls)
new._data = self._data.copy()
new._converted = self._converted.copy()
@@ -213,8 +212,7 @@ def copy(self) -> LazyConvertDict:
# Act like a dictionary.
def update(self, values: Mapping[str, Any]):
- """Assign all values in the given dict.
- """
+ """Assign all values in the given dict."""
for key, value in values.items():
self[key] = value
@@ -235,8 +233,7 @@ def get(self, key: str, default: Optional[Any] = None):
return default
def __contains__(self, key: Any) -> bool:
- """Determine whether `key` is an attribute on this object.
- """
+ """Determine whether `key` is an attribute on this object."""
return key in self._converted or key in self._data
def __iter__(self) -> Iterator[str]:
@@ -257,6 +254,7 @@ def __len__(self) -> int:
# Abstract base for model classes.
+
class Model(ABC):
"""An abstract object representing an object in the database. Model
objects act like dictionaries (i.e., they allow subscript access like
@@ -327,9 +325,8 @@ class Model(ABC):
"""
@classmethod
- def _getters(cls: Type['Model']):
- """Return a mapping from field names to getter functions.
- """
+ def _getters(cls: Type["Model"]):
+ """Return a mapping from field names to getter functions."""
# We could cache this if it becomes a performance problem to
# gather the getter mapping every time.
raise NotImplementedError()
@@ -358,10 +355,10 @@ def __init__(self, db: Optional[Database] = None, **values):
@classmethod
def _awaken(
- cls: Type[AnyModel],
- db: Optional[Database] = None,
- fixed_values: Dict[str, Any] = {},
- flex_values: Dict[str, Any] = {},
+ cls: Type[AnyModel],
+ db: Optional[Database] = None,
+ fixed_values: Dict[str, Any] = {},
+ flex_values: Dict[str, Any] = {},
) -> AnyModel:
"""Create an object with values drawn from the database.
@@ -376,9 +373,9 @@ def _awaken(
return obj
def __repr__(self) -> str:
- return '{}({})'.format(
+ return "{}({})".format(
type(self).__name__,
- ', '.join(f'{k}={v!r}' for k, v in dict(self).items()),
+ ", ".join(f"{k}={v!r}" for k, v in dict(self).items()),
)
def clear_dirty(self):
@@ -395,15 +392,13 @@ def _check_db(self, need_id: bool = True) -> Database:
exception is raised otherwise.
"""
if not self._db:
- raise ValueError(
- '{} has no database'.format(type(self).__name__)
- )
+ raise ValueError("{} has no database".format(type(self).__name__))
if need_id and not self.id:
- raise ValueError('{} has no id'.format(type(self).__name__))
+ raise ValueError("{} has no id".format(type(self).__name__))
return self._db
- def copy(self) -> 'Model':
+ def copy(self) -> "Model":
"""Create a copy of the model object.
The field values and other state is duplicated, but the new copy
@@ -479,22 +474,20 @@ def _setitem(self, key, value):
return changed
def __setitem__(self, key, value):
- """Assign the value for a field.
- """
+ """Assign the value for a field."""
self._setitem(key, value)
def __delitem__(self, key):
- """Remove a flexible attribute from the model.
- """
+ """Remove a flexible attribute from the model."""
if key in self._values_flex: # Flexible.
del self._values_flex[key]
self._dirty.add(key) # Mark for dropping on store.
elif key in self._fields: # Fixed
setattr(self, key, self._type(key).null)
elif key in self._getters(): # Computed.
- raise KeyError(f'computed field {key} cannot be deleted')
+ raise KeyError(f"computed field {key} cannot be deleted")
else:
- raise KeyError(f'no such field {key}')
+ raise KeyError(f"no such field {key}")
def keys(self, computed: bool = False):
"""Get a list of available field names for this object. The
@@ -517,8 +510,7 @@ def all_keys(cls):
# Act like a dictionary.
def update(self, values):
- """Assign all values in the given dict.
- """
+ """Assign all values in the given dict."""
for key, value in values.items():
self[key] = value
@@ -530,8 +522,7 @@ def items(self) -> Iterator[Tuple[str, Any]]:
yield key, self[key]
def __contains__(self, key) -> bool:
- """Determine whether `key` is an attribute on this object.
- """
+ """Determine whether `key` is an attribute on this object."""
return key in self.keys(computed=True)
def __iter__(self) -> Iterator[str]:
@@ -543,22 +534,22 @@ def __iter__(self) -> Iterator[str]:
# Convenient attribute access.
def __getattr__(self, key):
- if key.startswith('_'):
- raise AttributeError(f'model has no attribute {key!r}')
+ if key.startswith("_"):
+ raise AttributeError(f"model has no attribute {key!r}")
else:
try:
return self[key]
except KeyError:
- raise AttributeError(f'no such field {key!r}')
+ raise AttributeError(f"no such field {key!r}")
def __setattr__(self, key, value):
- if key.startswith('_'):
+ if key.startswith("_"):
super().__setattr__(key, value)
else:
self[key] = value
def __delattr__(self, key):
- if key.startswith('_'):
+ if key.startswith("_"):
super().__delattr__(key)
else:
del self[key]
@@ -578,18 +569,17 @@ def store(self, fields: Optional[Iterable[str]] = None):
assignments = []
subvars = []
for key in fields:
- if key != 'id' and key in self._dirty:
+ if key != "id" and key in self._dirty:
self._dirty.remove(key)
- assignments.append(key + '=?')
+ assignments.append(key + "=?")
value = self._type(key).to_sql(self[key])
subvars.append(value)
with db.transaction() as tx:
# Main table update.
if assignments:
- query = 'UPDATE {} SET {} WHERE id=?'.format(
- self._table,
- ','.join(assignments)
+ query = "UPDATE {} SET {} WHERE id=?".format(
+ self._table, ",".join(assignments)
)
subvars.append(self.id)
tx.mutate(query, subvars)
@@ -599,18 +589,18 @@ def store(self, fields: Optional[Iterable[str]] = None):
if key in self._dirty:
self._dirty.remove(key)
tx.mutate(
- 'INSERT INTO {} '
- '(entity_id, key, value) '
- 'VALUES (?, ?, ?);'.format(self._flex_table),
+ "INSERT INTO {} "
+ "(entity_id, key, value) "
+ "VALUES (?, ?, ?);".format(self._flex_table),
(self.id, key, value),
)
# Deleted flexible attributes.
for key in self._dirty:
tx.mutate(
- 'DELETE FROM {} '
- 'WHERE entity_id=? AND key=?'.format(self._flex_table),
- (self.id, key)
+ "DELETE FROM {} "
+ "WHERE entity_id=? AND key=?".format(self._flex_table),
+ (self.id, key),
)
self.clear_dirty()
@@ -633,20 +623,15 @@ def load(self):
self.clear_dirty()
def remove(self):
- """Remove the object's associated rows from the database.
- """
+ """Remove the object's associated rows from the database."""
db = self._check_db()
with db.transaction() as tx:
+ tx.mutate(f"DELETE FROM {self._table} WHERE id=?", (self.id,))
tx.mutate(
- f'DELETE FROM {self._table} WHERE id=?',
- (self.id,)
- )
- tx.mutate(
- f'DELETE FROM {self._flex_table} WHERE entity_id=?',
- (self.id,)
+ f"DELETE FROM {self._flex_table} WHERE entity_id=?", (self.id,)
)
- def add(self, db: Optional['Database'] = None):
+ def add(self, db: Optional["Database"] = None):
"""Add the object to the library database. This object must be
associated with a database; you can provide one via the `db`
parameter or use the currently associated database.
@@ -659,9 +644,7 @@ def add(self, db: Optional['Database'] = None):
db = self._check_db(False)
with db.transaction() as tx:
- new_id = tx.mutate(
- f'INSERT INTO {self._table} DEFAULT VALUES'
- )
+ new_id = tx.mutate(f"INSERT INTO {self._table} DEFAULT VALUES")
self.id = new_id
self.added = time.time()
@@ -676,9 +659,9 @@ def add(self, db: Optional['Database'] = None):
_formatter = FormattedMapping
def formatted(
- self,
- included_keys: str = _formatter.ALL_KEYS,
- for_path: bool = False,
+ self,
+ included_keys: str = _formatter.ALL_KEYS,
+ for_path: bool = False,
):
"""Get a mapping containing all values on this object formatted
as human-readable unicode strings.
@@ -686,9 +669,9 @@ def formatted(
return self._formatter(self, included_keys, for_path)
def evaluate_template(
- self,
- template: Union[str, Template],
- for_path: bool = False,
+ self,
+ template: Union[str, Template],
+ for_path: bool = False,
) -> str:
"""Evaluate a template (a string or a `Template` object) using
the object's fields. If `for_path` is true, then no new path
@@ -700,42 +683,41 @@ def evaluate_template(
else:
# Help out mypy
t = template
- return t.substitute(self.formatted(for_path=for_path),
- self._template_funcs())
+ return t.substitute(
+ self.formatted(for_path=for_path), self._template_funcs()
+ )
# Parsing.
@classmethod
def _parse(cls, key, string: str) -> Any:
- """Parse a string as a value for the given key.
- """
+ """Parse a string as a value for the given key."""
if not isinstance(string, str):
raise TypeError("_parse() argument must be a string")
return cls._type(key).parse(string)
def set_parse(self, key, string: str):
- """Set the object's key to a value represented by a string.
- """
+ """Set the object's key to a value represented by a string."""
self[key] = self._parse(key, string)
# Convenient queries.
@classmethod
def field_query(
- cls,
- field,
- pattern,
- query_cls: Type[FieldQuery] = MatchQuery,
+ cls,
+ field,
+ pattern,
+ query_cls: Type[FieldQuery] = MatchQuery,
) -> FieldQuery:
"""Get a `FieldQuery` for this model."""
return query_cls(field, pattern, field in cls._fields)
@classmethod
def all_fields_query(
- cls: Type['Model'],
- pats: Mapping,
- query_cls: Type[FieldQuery] = MatchQuery,
+ cls: Type["Model"],
+ pats: Mapping,
+ query_cls: Type[FieldQuery] = MatchQuery,
):
"""Get a query that matches many fields with different patterns.
@@ -743,8 +725,7 @@ def all_fields_query(
resulting query is a conjunction ("and") of per-field queries
for all of these field/pattern pairs.
"""
- subqueries = [cls.field_query(k, v, query_cls)
- for k, v in pats.items()]
+ subqueries = [cls.field_query(k, v, query_cls) for k, v in pats.items()]
return AndQuery(subqueries)
@@ -760,13 +741,13 @@ class Results(Generic[AnyModel]):
"""
def __init__(
- self,
- model_class: Type[AnyModel],
- rows: List[Mapping],
- db: 'Database',
- flex_rows,
- query: Optional[Query] = None,
- sort=None,
+ self,
+ model_class: Type[AnyModel],
+ rows: List[Mapping],
+ db: "Database",
+ flex_rows,
+ query: Optional[Query] = None,
+ sort=None,
):
"""Create a result set that will construct objects of type
`model_class`.
@@ -825,7 +806,7 @@ def _get_objects(self) -> Iterator[AnyModel]:
else:
while self._rows:
row = self._rows.pop(0)
- obj = self._make_model(row, flex_attrs.get(row['id'], {}))
+ obj = self._make_model(row, flex_attrs.get(row["id"], {}))
# If there is a slow-query predicate, ensurer that the
# object passes it.
if not self.query or self.query.match(obj):
@@ -848,31 +829,27 @@ def __iter__(self) -> Iterator[AnyModel]:
return self._get_objects()
def _get_indexed_flex_attrs(self) -> Mapping:
- """ Index flexible attributes by the entity id they belong to
- """
+ """Index flexible attributes by the entity id they belong to"""
flex_values: Dict[int, Dict[str, Any]] = {}
for row in self.flex_rows:
- if row['entity_id'] not in flex_values:
- flex_values[row['entity_id']] = {}
+ if row["entity_id"] not in flex_values:
+ flex_values[row["entity_id"]] = {}
- flex_values[row['entity_id']][row['key']] = row['value']
+ flex_values[row["entity_id"]][row["key"]] = row["value"]
return flex_values
def _make_model(self, row, flex_values: Dict = {}) -> AnyModel:
- """ Create a Model object for the given row
- """
+ """Create a Model object for the given row"""
cols = dict(row)
- values = {k: v for (k, v) in cols.items()
- if not k[:4] == 'flex'}
+ values = {k: v for (k, v) in cols.items() if not k[:4] == "flex"}
# Construct the Python object
obj = self.model_class._awaken(self.db, values, flex_values)
return obj
def __len__(self) -> int:
- """Get the number of matching objects.
- """
+ """Get the number of matching objects."""
if not self._rows:
# Fully materialized. Just count the objects.
return len(self._objects)
@@ -889,13 +866,11 @@ def __len__(self) -> int:
return self._row_count
def __nonzero__(self) -> bool:
- """Does this result contain any objects?
- """
+ """Does this result contain any objects?"""
return self.__bool__()
def __bool__(self) -> bool:
- """Does this result contain any objects?
- """
+ """Does this result contain any objects?"""
return bool(len(self))
def __getitem__(self, n):
@@ -913,7 +888,7 @@ def __getitem__(self, n):
next(it)
return next(it)
except StopIteration:
- raise IndexError(f'result index {n} out of range')
+ raise IndexError(f"result index {n} out of range")
def get(self) -> Optional[AnyModel]:
"""Return the first matching object, or None if no objects
@@ -936,10 +911,10 @@ class Transaction:
current transaction.
"""
- def __init__(self, db: 'Database'):
+ def __init__(self, db: "Database"):
self.db = db
- def __enter__(self) -> 'Transaction':
+ def __enter__(self) -> "Transaction":
"""Begin a transaction. This transaction may be created while
another is active in a different thread.
"""
@@ -953,10 +928,10 @@ def __enter__(self) -> 'Transaction':
return self
def __exit__(
- self,
- exc_type: Type[Exception],
- exc_value: Exception,
- traceback: TracebackType,
+ self,
+ exc_type: Type[Exception],
+ exc_value: Exception,
+ traceback: TracebackType,
):
"""Complete a transaction. This must be the most recently
entered but not yet exited transaction. If it is the last active
@@ -990,8 +965,10 @@ def mutate(self, statement: str, subvals: Sequence = ()) -> Any:
# In two specific cases, SQLite reports an error while accessing
# the underlying database file. We surface these exceptions as
# DBAccessError so the application can abort.
- if e.args[0] in ("attempt to write a readonly database",
- "unable to open database file"):
+ if e.args[0] in (
+ "attempt to write a readonly database",
+ "unable to open database file",
+ ):
raise DBAccessError(e.args[0])
else:
raise
@@ -1015,7 +992,7 @@ class Database:
"""The Model subclasses representing tables in this database.
"""
- supports_extensions = hasattr(sqlite3.Connection, 'enable_load_extension')
+ supports_extensions = hasattr(sqlite3.Connection, "enable_load_extension")
"""Whether or not the current version of SQLite supports extensions"""
revision = 0
@@ -1033,8 +1010,7 @@ def __init__(self, path, timeout: float = 5.0):
self.timeout = timeout
self._connections: Dict[int, sqlite3.Connection] = {}
- self._tx_stacks: DefaultDict[int, List[Transaction]] = \
- defaultdict(list)
+ self._tx_stacks: DefaultDict[int, List[Transaction]] = defaultdict(list)
self._extensions: List[str] = []
# A lock to protect the _connections and _tx_stacks maps, which
@@ -1150,7 +1126,8 @@ def load_extension(self, path: str):
"""Load an SQLite extension into all open connections."""
if not self.supports_extensions:
raise ValueError(
- 'this sqlite3 installation does not support extensions')
+ "this sqlite3 installation does not support extensions"
+ )
self._extensions.append(path)
@@ -1166,7 +1143,7 @@ def _make_table(self, table: str, fields: Mapping[str, types.Type]):
"""
# Get current schema.
with self.transaction() as tx:
- rows = tx.query('PRAGMA table_info(%s)' % table)
+ rows = tx.query("PRAGMA table_info(%s)" % table)
current_fields = {row[1] for row in rows}
field_names = set(fields.keys())
@@ -1178,17 +1155,18 @@ def _make_table(self, table: str, fields: Mapping[str, types.Type]):
# No table exists.
columns = []
for name, typ in fields.items():
- columns.append(f'{name} {typ.sql}')
- setup_sql = 'CREATE TABLE {} ({});\n'.format(table,
- ', '.join(columns))
+ columns.append(f"{name} {typ.sql}")
+ setup_sql = "CREATE TABLE {} ({});\n".format(
+ table, ", ".join(columns)
+ )
else:
# Table exists does not match the field set.
- setup_sql = ''
+ setup_sql = ""
for name, typ in fields.items():
if name in current_fields:
continue
- setup_sql += 'ALTER TABLE {} ADD COLUMN {} {};\n'.format(
+ setup_sql += "ALTER TABLE {} ADD COLUMN {} {};\n".format(
table, name, typ.sql
)
@@ -1200,7 +1178,8 @@ def _make_attribute_table(self, flex_table: str):
for the given entity (if they don't exist).
"""
with self.transaction() as tx:
- tx.script("""
+ tx.script(
+ """
CREATE TABLE IF NOT EXISTS {0} (
id INTEGER PRIMARY KEY,
entity_id INTEGER,
@@ -1209,7 +1188,10 @@ def _make_attribute_table(self, flex_table: str):
UNIQUE(entity_id, key) ON CONFLICT REPLACE);
CREATE INDEX IF NOT EXISTS {0}_by_entity
ON {0} (entity_id);
- """.format(flex_table))
+ """.format(
+ flex_table
+ )
+ )
# Querying.
@@ -1231,21 +1213,20 @@ def _fetch(
sql = ("SELECT * FROM {} WHERE {} {}").format(
model_cls._table,
- where or '1',
- f"ORDER BY {order_by}" if order_by else '',
+ where or "1",
+ f"ORDER BY {order_by}" if order_by else "",
)
# Fetch flexible attributes for items matching the main query.
# Doing the per-item filtering in python is faster than issuing
# one query per item to sqlite.
- flex_sql = ("""
+ flex_sql = """
SELECT * FROM {} WHERE entity_id IN
(SELECT id FROM {} WHERE {});
""".format(
- model_cls._flex_table,
- model_cls._table,
- where or '1',
- )
+ model_cls._flex_table,
+ model_cls._table,
+ where or "1",
)
with self.transaction() as tx:
@@ -1253,7 +1234,10 @@ def _fetch(
flex_rows = tx.query(flex_sql, subvals)
return Results(
- model_cls, rows, self, flex_rows,
+ model_cls,
+ rows,
+ self,
+ flex_rows,
None if where else query, # Slow query component.
sort if sort.is_slow() else None, # Slow sort component.
)
@@ -1266,4 +1250,4 @@ def _get(
"""Get a Model object by its id or None if the id does not
exist.
"""
- return self._fetch(model_cls, MatchQuery('id', id)).get()
+ return self._fetch(model_cls, MatchQuery("id", id)).get()
diff --git a/beets/dbcore/query.py b/beets/dbcore/query.py
index 981b3378d0..03f85ac77c 100644
--- a/beets/dbcore/query.py
+++ b/beets/dbcore/query.py
@@ -17,10 +17,14 @@
from __future__ import annotations
-from abc import ABC, abstractmethod
import re
+import unicodedata
+from abc import ABC, abstractmethod
+from datetime import datetime, timedelta
+from functools import reduce
from operator import mul
from typing import (
+ TYPE_CHECKING,
Any,
Collection,
Generic,
@@ -37,12 +41,6 @@
)
from beets import util
-from datetime import datetime, timedelta
-import unicodedata
-from functools import reduce
-
-from typing import TYPE_CHECKING
-
if TYPE_CHECKING:
from beets.dbcore import Model
@@ -82,8 +80,7 @@ def __init__(self, what, expected, detail=None):
class Query(ABC):
- """An abstract class representing a query into the database.
- """
+ """An abstract class representing a query into the database."""
def clause(self) -> Tuple[Optional[str], Sequence[Any]]:
"""Generate an SQLite expression implementing the query.
@@ -123,6 +120,7 @@ class NamedQuery(Query):
"""Non-field query, i.e. the query prefix is not a field but identifies the
query class.
"""
+
@abstractmethod
def __init__(self, pattern):
...
@@ -167,12 +165,17 @@ def match(self, obj: Model) -> bool:
return self.value_match(self.pattern, obj.get(self.field))
def __repr__(self) -> str:
- return ("{0.__class__.__name__}({0.field!r}, {0.pattern!r}, "
- "{0.fast})".format(self))
+ return (
+ "{0.__class__.__name__}({0.field!r}, {0.pattern!r}, "
+ "{0.fast})".format(self)
+ )
def __eq__(self, other) -> bool:
- return super().__eq__(other) and \
- self.field == other.field and self.pattern == other.pattern
+ return (
+ super().__eq__(other)
+ and self.field == other.field
+ and self.pattern == other.pattern
+ )
def __hash__(self) -> int:
return hash((self.field, hash(self.pattern)))
@@ -233,10 +236,11 @@ class StringQuery(StringFieldQuery[str]):
"""A query that matches a whole string in a specific Model field."""
def col_clause(self) -> Tuple[str, Sequence[SQLiteType]]:
- search = (self.pattern
- .replace('\\', '\\\\')
- .replace('%', '\\%')
- .replace('_', '\\_'))
+ search = (
+ self.pattern.replace("\\", "\\\\")
+ .replace("%", "\\%")
+ .replace("_", "\\_")
+ )
clause = self.field + " like ? escape '\\'"
subvals = [search]
return clause, subvals
@@ -250,11 +254,12 @@ class SubstringQuery(StringFieldQuery[str]):
"""A query that matches a substring in a specific Model field."""
def col_clause(self) -> Tuple[str, Sequence[SQLiteType]]:
- pattern = (self.pattern
- .replace('\\', '\\\\')
- .replace('%', '\\%')
- .replace('_', '\\_'))
- search = '%' + pattern + '%'
+ pattern = (
+ self.pattern.replace("\\", "\\\\")
+ .replace("%", "\\%")
+ .replace("_", "\\_")
+ )
+ search = "%" + pattern + "%"
clause = self.field + " like ? escape '\\'"
subvals = [search]
return clause, subvals
@@ -277,9 +282,9 @@ def __init__(self, field: str, pattern: str, fast: bool = True):
pattern_re = re.compile(pattern)
except re.error as exc:
# Invalid regular expression.
- raise InvalidQueryArgumentValueError(pattern,
- "a regular expression",
- format(exc))
+ raise InvalidQueryArgumentValueError(
+ pattern, "a regular expression", format(exc)
+ )
super().__init__(field, pattern_re, fast)
@@ -291,7 +296,7 @@ def _normalize(s: str) -> str:
"""Normalize a Unicode string's representation (used on both
patterns and matched values).
"""
- return unicodedata.normalize('NFC', s)
+ return unicodedata.normalize("NFC", s)
@classmethod
def string_match(cls, pattern: Pattern, value: str) -> bool:
@@ -304,10 +309,10 @@ class BooleanQuery(MatchQuery[int]):
"""
def __init__(
- self,
- field: str,
- pattern: bool,
- fast: bool = True,
+ self,
+ field: str,
+ pattern: bool,
+ fast: bool = True,
):
if isinstance(pattern, str):
pattern = util.str2bool(pattern)
@@ -330,7 +335,7 @@ def __init__(self, field: str, pattern: Union[bytes, str, memoryview]):
# rather than encoded Unicode.
if isinstance(pattern, (str, bytes)):
if isinstance(pattern, str):
- bytes_pattern = pattern.encode('utf-8')
+ bytes_pattern = pattern.encode("utf-8")
else:
bytes_pattern = pattern
self.buf_pattern = memoryview(bytes_pattern)
@@ -379,7 +384,7 @@ def _convert(self, s: str) -> Union[float, int, None]:
def __init__(self, field: str, pattern: str, fast: bool = True):
super().__init__(field, pattern, fast)
- parts = pattern.split('..', 1)
+ parts = pattern.split("..", 1)
if len(parts) == 1:
# No range.
self.point = self._convert(parts[0])
@@ -409,17 +414,19 @@ def match(self, obj: Model) -> bool:
def col_clause(self) -> Tuple[str, Sequence[SQLiteType]]:
if self.point is not None:
- return self.field + '=?', (self.point,)
+ return self.field + "=?", (self.point,)
else:
if self.rangemin is not None and self.rangemax is not None:
- return ('{0} >= ? AND {0} <= ?'.format(self.field),
- (self.rangemin, self.rangemax))
+ return (
+ "{0} >= ? AND {0} <= ?".format(self.field),
+ (self.rangemin, self.rangemax),
+ )
elif self.rangemin is not None:
- return f'{self.field} >= ?', (self.rangemin,)
+ return f"{self.field} >= ?", (self.rangemin,)
elif self.rangemax is not None:
- return f'{self.field} <= ?', (self.rangemax,)
+ return f"{self.field} <= ?", (self.rangemax,)
else:
- return '1', ()
+ return "1", ()
class CollectionQuery(Query):
@@ -458,17 +465,16 @@ def clause_with_joiner(
if not subq_clause:
# Fall back to slow query.
return None, ()
- clause_parts.append('(' + subq_clause + ')')
+ clause_parts.append("(" + subq_clause + ")")
subvals += subq_subvals
- clause = (' ' + joiner + ' ').join(clause_parts)
+ clause = (" " + joiner + " ").join(clause_parts)
return clause, subvals
def __repr__(self) -> str:
return "{0.__class__.__name__}({0.subqueries!r})".format(self)
def __eq__(self, other) -> bool:
- return super().__eq__(other) and \
- self.subqueries == other.subqueries
+ return super().__eq__(other) and self.subqueries == other.subqueries
def __hash__(self) -> int:
"""Since subqueries are mutable, this object should not be hashable.
@@ -495,7 +501,7 @@ def __init__(self, pattern, fields, cls: Type[FieldQuery]):
super().__init__(subqueries)
def clause(self) -> Tuple[Optional[str], Sequence[SQLiteType]]:
- return self.clause_with_joiner('or')
+ return self.clause_with_joiner("or")
def match(self, obj: Model) -> bool:
for subq in self.subqueries:
@@ -504,12 +510,13 @@ def match(self, obj: Model) -> bool:
return False
def __repr__(self) -> str:
- return ("{0.__class__.__name__}({0.pattern!r}, {0.fields!r}, "
- "{0.query_class.__name__})".format(self))
+ return (
+ "{0.__class__.__name__}({0.pattern!r}, {0.fields!r}, "
+ "{0.query_class.__name__})".format(self)
+ )
def __eq__(self, other) -> bool:
- return super().__eq__(other) and \
- self.query_class == other.query_class
+ return super().__eq__(other) and self.query_class == other.query_class
def __hash__(self) -> int:
return hash((self.pattern, tuple(self.fields), self.query_class))
@@ -519,6 +526,7 @@ class MutableCollectionQuery(CollectionQuery):
"""A collection query whose subqueries may be modified after the
query is initialized.
"""
+
subqueries: MutableSequence
def __setitem__(self, key, value):
@@ -532,7 +540,7 @@ class AndQuery(MutableCollectionQuery):
"""A conjunction of a list of other queries."""
def clause(self) -> Tuple[Optional[str], Sequence[SQLiteType]]:
- return self.clause_with_joiner('and')
+ return self.clause_with_joiner("and")
def match(self, obj: Model) -> bool:
return all(q.match(obj) for q in self.subqueries)
@@ -542,7 +550,7 @@ class OrQuery(MutableCollectionQuery):
"""A conjunction of a list of other queries."""
def clause(self) -> Tuple[Optional[str], Sequence[SQLiteType]]:
- return self.clause_with_joiner('or')
+ return self.clause_with_joiner("or")
def match(self, obj: Model) -> bool:
return any(q.match(obj) for q in self.subqueries)
@@ -559,7 +567,7 @@ def __init__(self, subquery):
def clause(self) -> Tuple[Optional[str], Sequence[SQLiteType]]:
clause, subvals = self.subquery.clause()
if clause:
- return f'not ({clause})', subvals
+ return f"not ({clause})", subvals
else:
# If there is no clause, there is nothing to negate. All the logic
# is handled by match() for slow queries.
@@ -572,18 +580,17 @@ def __repr__(self) -> str:
return "{0.__class__.__name__}({0.subquery!r})".format(self)
def __eq__(self, other) -> bool:
- return super().__eq__(other) and \
- self.subquery == other.subquery
+ return super().__eq__(other) and self.subquery == other.subquery
def __hash__(self) -> int:
- return hash(('not', hash(self.subquery)))
+ return hash(("not", hash(self.subquery)))
class TrueQuery(Query):
"""A query that always matches."""
def clause(self) -> Tuple[str, Sequence[SQLiteType]]:
- return '1', ()
+ return "1", ()
def match(self, obj: Model) -> bool:
return True
@@ -593,7 +600,7 @@ class FalseQuery(Query):
"""A query that never matches."""
def clause(self) -> Tuple[str, Sequence[SQLiteType]]:
- return '0', ()
+ return "0", ()
def match(self, obj: Model) -> bool:
return False
@@ -601,11 +608,12 @@ def match(self, obj: Model) -> bool:
# Time/date queries.
+
def _parse_periods(pattern: str) -> Tuple[Optional[Period], Optional[Period]]:
"""Parse a string containing two dates separated by two dots (..).
Return a pair of `Period` objects.
"""
- parts = pattern.split('..', 1)
+ parts = pattern.split("..", 1)
if len(parts) == 1:
instant = Period.parse(parts[0])
return (instant, instant)
@@ -622,18 +630,19 @@ class Period:
instants of time during January 2014.
"""
- precisions = ('year', 'month', 'day', 'hour', 'minute', 'second')
+ precisions = ("year", "month", "day", "hour", "minute", "second")
date_formats = (
- ('%Y',), # year
- ('%Y-%m',), # month
- ('%Y-%m-%d',), # day
- ('%Y-%m-%dT%H', '%Y-%m-%d %H'), # hour
- ('%Y-%m-%dT%H:%M', '%Y-%m-%d %H:%M'), # minute
- ('%Y-%m-%dT%H:%M:%S', '%Y-%m-%d %H:%M:%S') # second
+ ("%Y",), # year
+ ("%Y-%m",), # month
+ ("%Y-%m-%d",), # day
+ ("%Y-%m-%dT%H", "%Y-%m-%d %H"), # hour
+ ("%Y-%m-%dT%H:%M", "%Y-%m-%d %H:%M"), # minute
+ ("%Y-%m-%dT%H:%M:%S", "%Y-%m-%d %H:%M:%S"), # second
+ )
+ relative_units = {"y": 365, "m": 30, "w": 7, "d": 1}
+    relative_re = (
+        "(?P<sign>[+|-]?)(?P<quantity>[0-9]+)" + "(?P<timespan>[y|m|w|d])"
+    )
-    relative_units = {'y': 365, 'm': 30, 'w': 7, 'd': 1}
-    relative_re = '(?P<sign>[+|-]?)(?P<quantity>[0-9]+)' + \
-        '(?P<timespan>[y|m|w|d])'
def __init__(self, date: datetime, precision: str):
"""Create a period with the given date (a `datetime` object) and
@@ -641,12 +650,12 @@ def __init__(self, date: datetime, precision: str):
or "second").
"""
if precision not in Period.precisions:
- raise ValueError(f'Invalid precision {precision}')
+ raise ValueError(f"Invalid precision {precision}")
self.date = date
self.precision = precision
@classmethod
- def parse(cls: Type['Period'], string: str) -> Optional['Period']:
+ def parse(cls: Type["Period"], string: str) -> Optional["Period"]:
"""Parse a date and return a `Period` object or `None` if the
string is empty, or raise an InvalidQueryArgumentValueError if
the string cannot be parsed to a date.
@@ -663,8 +672,9 @@ def parse(cls: Type['Period'], string: str) -> Optional['Period']:
and a "year" is exactly 365 days.
"""
- def find_date_and_format(string: str) -> \
- Union[Tuple[None, None], Tuple[datetime, int]]:
+ def find_date_and_format(
+ string: str,
+ ) -> Union[Tuple[None, None], Tuple[datetime, int]]:
for ord, format in enumerate(cls.date_formats):
for format_option in format:
try:
@@ -683,23 +693,26 @@ def find_date_and_format(string: str) -> \
# Check for a relative date.
match_dq = re.match(cls.relative_re, string)
if match_dq:
- sign = match_dq.group('sign')
- quantity = match_dq.group('quantity')
- timespan = match_dq.group('timespan')
+ sign = match_dq.group("sign")
+ quantity = match_dq.group("quantity")
+ timespan = match_dq.group("timespan")
# Add or subtract the given amount of time from the current
# date.
- multiplier = -1 if sign == '-' else 1
+ multiplier = -1 if sign == "-" else 1
days = cls.relative_units[timespan]
- date = datetime.now() + \
- timedelta(days=int(quantity) * days) * multiplier
+ date = (
+ datetime.now()
+ + timedelta(days=int(quantity) * days) * multiplier
+ )
return cls(date, cls.precisions[5])
# Check for an absolute date.
date, ordinal = find_date_and_format(string)
if date is None or ordinal is None:
- raise InvalidQueryArgumentValueError(string,
- 'a valid date/time string')
+ raise InvalidQueryArgumentValueError(
+ string, "a valid date/time string"
+ )
precision = cls.precisions[ordinal]
return cls(date, precision)
@@ -709,23 +722,23 @@ def open_right_endpoint(self) -> datetime:
"""
precision = self.precision
date = self.date
- if 'year' == self.precision:
+ if "year" == self.precision:
return date.replace(year=date.year + 1, month=1)
- elif 'month' == precision:
- if (date.month < 12):
+ elif "month" == precision:
+ if date.month < 12:
return date.replace(month=date.month + 1)
else:
return date.replace(year=date.year + 1, month=1)
- elif 'day' == precision:
+ elif "day" == precision:
return date + timedelta(days=1)
- elif 'hour' == precision:
+ elif "hour" == precision:
return date + timedelta(hours=1)
- elif 'minute' == precision:
+ elif "minute" == precision:
return date + timedelta(minutes=1)
- elif 'second' == precision:
+ elif "second" == precision:
return date + timedelta(seconds=1)
else:
- raise ValueError(f'unhandled precision {precision}')
+ raise ValueError(f"unhandled precision {precision}")
class DateInterval:
@@ -737,8 +750,9 @@ class DateInterval:
def __init__(self, start: Optional[datetime], end: Optional[datetime]):
if start is not None and end is not None and not start < end:
- raise ValueError("start date {} is not before end date {}"
- .format(start, end))
+ raise ValueError(
+ "start date {} is not before end date {}".format(start, end)
+ )
self.start = start
self.end = end
@@ -748,8 +762,7 @@ def from_periods(
start: Optional[Period],
end: Optional[Period],
) -> DateInterval:
- """Create an interval with two Periods as the endpoints.
- """
+ """Create an interval with two Periods as the endpoints."""
end_date = end.open_right_endpoint() if end is not None else None
start_date = start.date if start is not None else None
return cls(start_date, end_date)
@@ -762,7 +775,7 @@ def contains(self, date: datetime) -> bool:
return True
def __str__(self) -> str:
- return f'[{self.start}, {self.end})'
+ return f"[{self.start}, {self.end})"
class DateQuery(FieldQuery):
@@ -805,10 +818,10 @@ def col_clause(self) -> Tuple[str, Sequence[SQLiteType]]:
if clause_parts:
# One- or two-sided interval.
- clause = ' AND '.join(clause_parts)
+ clause = " AND ".join(clause_parts)
else:
# Match any date.
- clause = '1'
+ clause = "1"
return clause, subvals
@@ -836,12 +849,13 @@ def _convert(self, s: str) -> Optional[float]:
return float(s)
except ValueError:
raise InvalidQueryArgumentValueError(
- s,
- "a M:SS string or a float")
+ s, "a M:SS string or a float"
+ )
# Sorting.
+
class Sort:
"""An abstract class representing a sort operation for a query into
the database.
@@ -854,8 +868,7 @@ def order_clause(self) -> Optional[str]:
return None
def sort(self, items: List) -> List:
- """Sort the list of objects and return a list.
- """
+ """Sort the list of objects and return a list."""
return sorted(items)
def is_slow(self) -> bool:
@@ -872,8 +885,7 @@ def __eq__(self, other) -> bool:
class MultipleSort(Sort):
- """Sort that encapsulates multiple sub-sorts.
- """
+ """Sort that encapsulates multiple sub-sorts."""
def __init__(self, sorts: Optional[List[Sort]] = None):
self.sorts = sorts or []
@@ -922,14 +934,13 @@ def sort(self, items):
return items
def __repr__(self):
- return f'MultipleSort({self.sorts!r})'
+ return f"MultipleSort({self.sorts!r})"
def __hash__(self):
return hash(tuple(self.sorts))
def __eq__(self, other):
- return super().__eq__(other) and \
- self.sorts == other.sorts
+ return super().__eq__(other) and self.sorts == other.sorts
class FieldSort(Sort):
@@ -953,7 +964,7 @@ def sort(self, objs: Collection):
# attributes with different types without falling over.
def key(obj: Model) -> Any:
- field_val = obj.get(self.field, '')
+ field_val = obj.get(self.field, "")
if self.case_insensitive and isinstance(field_val, str):
field_val = field_val.lower()
return field_val
@@ -961,32 +972,35 @@ def key(obj: Model) -> Any:
return sorted(objs, key=key, reverse=not self.ascending)
def __repr__(self) -> str:
- return '<{}: {}{}>'.format(
+ return "<{}: {}{}>".format(
type(self).__name__,
self.field,
- '+' if self.ascending else '-',
+ "+" if self.ascending else "-",
)
def __hash__(self) -> int:
return hash((self.field, self.ascending))
def __eq__(self, other) -> bool:
- return super().__eq__(other) and \
- self.field == other.field and \
- self.ascending == other.ascending
+ return (
+ super().__eq__(other)
+ and self.field == other.field
+ and self.ascending == other.ascending
+ )
class FixedFieldSort(FieldSort):
- """Sort object to sort on a fixed field.
- """
+ """Sort object to sort on a fixed field."""
def order_clause(self) -> str:
order = "ASC" if self.ascending else "DESC"
if self.case_insensitive:
- field = '(CASE ' \
- 'WHEN TYPEOF({0})="text" THEN LOWER({0}) ' \
- 'WHEN TYPEOF({0})="blob" THEN LOWER({0}) ' \
- 'ELSE {0} END)'.format(self.field)
+ field = (
+ "(CASE "
+ 'WHEN TYPEOF({0})="text" THEN LOWER({0}) '
+ 'WHEN TYPEOF({0})="blob" THEN LOWER({0}) '
+ "ELSE {0} END)".format(self.field)
+ )
else:
field = self.field
return f"{field} {order}"
diff --git a/beets/dbcore/queryparse.py b/beets/dbcore/queryparse.py
index dc51a5065b..e2b082ecc9 100644
--- a/beets/dbcore/queryparse.py
+++ b/beets/dbcore/queryparse.py
@@ -15,34 +15,30 @@
"""Parsing of strings into DBCore queries.
"""
-import re
import itertools
-from typing import Dict, Type, Tuple, Optional, Collection, List, \
- Sequence
+import re
+from typing import Collection, Dict, List, Optional, Sequence, Tuple, Type
-from . import query, Model
+from . import Model, query
from .query import Query, Sort
PARSE_QUERY_PART_REGEX = re.compile(
# Non-capturing optional segment for the keyword.
- r'(-|\^)?' # Negation prefixes.
-
- r'(?:'
- r'(\S+?)' # The field key.
- r'(? Tuple[Optional[str], str, Type[query.Query], bool]:
"""Parse a single *query part*, which is a chunk of a complete query
string representing a single criterion.
@@ -94,13 +90,13 @@ def parse_query_part(
assert match # Regex should always match
negate = bool(match.group(1))
key = match.group(2)
- term = match.group(3).replace('\\:', ':')
+ term = match.group(3).replace("\\:", ":")
# Check whether there's a prefix in the query and use the
# corresponding query type.
for pre, query_class in prefixes.items():
if term.startswith(pre):
- return key, term[len(pre):], query_class, negate
+ return key, term[len(pre) :], query_class, negate
# No matching prefix, so use either the query class determined by
# the field or the default as a fallback.
@@ -109,9 +105,9 @@ def parse_query_part(
def construct_query_part(
- model_cls: Type[Model],
- prefixes: Dict,
- query_part: str,
+ model_cls: Type[Model],
+ prefixes: Dict,
+ query_part: str,
) -> query.Query:
"""Parse a *query part* string and return a :class:`Query` object.
@@ -133,14 +129,16 @@ def construct_query_part(
# Use `model_cls` to build up a map from field (or query) names to
# `Query` classes.
query_classes: Dict[str, Type[Query]] = {}
- for k, t in itertools.chain(model_cls._fields.items(),
- model_cls._types.items()):
+ for k, t in itertools.chain(
+ model_cls._fields.items(), model_cls._types.items()
+ ):
query_classes[k] = t.query
query_classes.update(model_cls._queries) # Non-field queries.
# Parse the string.
- key, pattern, query_class, negate = \
- parse_query_part(query_part, query_classes, prefixes)
+ key, pattern, query_class, negate = parse_query_part(
+ query_part, query_classes, prefixes
+ )
# If there's no key (field name) specified, this is a "match
# anything" query.
@@ -149,8 +147,9 @@ def construct_query_part(
# The query type matches a specific field, but none was
# specified. So we use a version of the query that matches
# any field.
- out_query = query.AnyFieldQuery(pattern, model_cls._search_fields,
- query_class)
+ out_query = query.AnyFieldQuery(
+ pattern, model_cls._search_fields, query_class
+ )
elif issubclass(query_class, query.NamedQuery):
# Non-field query type.
out_query = query_class(pattern)
@@ -178,10 +177,10 @@ def construct_query_part(
# TYPING ERROR
def query_from_strings(
- query_cls: Type[query.CollectionQuery],
- model_cls: Type[Model],
- prefixes: Dict,
- query_parts: Collection[str],
+ query_cls: Type[query.CollectionQuery],
+ model_cls: Type[Model],
+ prefixes: Dict,
+ query_parts: Collection[str],
) -> query.Query:
"""Creates a collection query of type `query_cls` from a list of
strings in the format used by parse_query_part. `model_cls`
@@ -196,9 +195,9 @@ def query_from_strings(
def construct_sort_part(
- model_cls: Type[Model],
- part: str,
- case_insensitive: bool = True,
+ model_cls: Type[Model],
+ part: str,
+ case_insensitive: bool = True,
) -> Sort:
"""Create a `Sort` from a single string criterion.
@@ -211,12 +210,13 @@ def construct_sort_part(
field = part[:-1]
assert field, "field is missing"
direction = part[-1]
- assert direction in ('+', '-'), "part must end with + or -"
- is_ascending = direction == '+'
+ assert direction in ("+", "-"), "part must end with + or -"
+ is_ascending = direction == "+"
if field in model_cls._sorts:
- sort = model_cls._sorts[field](model_cls, is_ascending,
- case_insensitive)
+ sort = model_cls._sorts[field](
+ model_cls, is_ascending, case_insensitive
+ )
elif field in model_cls._fields:
sort = query.FixedFieldSort(field, is_ascending, case_insensitive)
else:
@@ -226,12 +226,11 @@ def construct_sort_part(
def sort_from_strings(
- model_cls: Type[Model],
- sort_parts: Sequence[str],
- case_insensitive: bool = True,
+ model_cls: Type[Model],
+ sort_parts: Sequence[str],
+ case_insensitive: bool = True,
) -> Sort:
- """Create a `Sort` from a list of sort criteria (strings).
- """
+ """Create a `Sort` from a list of sort criteria (strings)."""
if not sort_parts:
return query.NullSort()
elif len(sort_parts) == 1:
@@ -239,16 +238,17 @@ def sort_from_strings(
else:
sort = query.MultipleSort()
for part in sort_parts:
- sort.add_sort(construct_sort_part(model_cls, part,
- case_insensitive))
+ sort.add_sort(
+ construct_sort_part(model_cls, part, case_insensitive)
+ )
return sort
def parse_sorted_query(
- model_cls: Type[Model],
- parts: List[str],
- prefixes: Dict = {},
- case_insensitive: bool = True,
+ model_cls: Type[Model],
+ parts: List[str],
+ prefixes: Dict = {},
+ case_insensitive: bool = True,
) -> Tuple[query.Query, Sort]:
"""Given a list of strings, create the `Query` and `Sort` that they
represent.
@@ -260,24 +260,24 @@ def parse_sorted_query(
# Split up query in to comma-separated subqueries, each representing
# an AndQuery, which need to be joined together in one OrQuery
subquery_parts = []
- for part in parts + [',']:
- if part.endswith(','):
+ for part in parts + [","]:
+ if part.endswith(","):
# Ensure we can catch "foo, bar" as well as "foo , bar"
last_subquery_part = part[:-1]
if last_subquery_part:
subquery_parts.append(last_subquery_part)
# Parse the subquery in to a single AndQuery
# TODO: Avoid needlessly wrapping AndQueries containing 1 subquery?
- query_parts.append(query_from_strings(
- query.AndQuery, model_cls, prefixes, subquery_parts
- ))
+ query_parts.append(
+ query_from_strings(
+ query.AndQuery, model_cls, prefixes, subquery_parts
+ )
+ )
del subquery_parts[:]
else:
# Sort parts (1) end in + or -, (2) don't have a field, and
# (3) consist of more than just the + or -.
- if part.endswith(('+', '-')) \
- and ':' not in part \
- and len(part) > 1:
+ if part.endswith(("+", "-")) and ":" not in part and len(part) > 1:
sort_parts.append(part)
else:
subquery_parts.append(part)
diff --git a/beets/dbcore/types.py b/beets/dbcore/types.py
index 63829fcf6b..9b9c688da9 100644
--- a/beets/dbcore/types.py
+++ b/beets/dbcore/types.py
@@ -14,26 +14,30 @@
"""Representation of type information for DBCore model fields.
"""
-from abc import ABC
import sys
import typing
-from typing import Any, cast, Generic, List, TYPE_CHECKING, TypeVar, Union
-from .query import BooleanQuery, FieldQuery, NumericQuery, SubstringQuery
+from abc import ABC
+from typing import TYPE_CHECKING, Any, Generic, List, TypeVar, Union, cast
+
from beets.util import str2bool
+from .query import BooleanQuery, FieldQuery, NumericQuery, SubstringQuery
# Abstract base.
# FIXME: unconditionally define the Protocol once we drop Python 3.7
if TYPE_CHECKING and sys.version_info >= (3, 8):
+
class ModelType(typing.Protocol):
"""Protocol that specifies the required constructor for model types,
i.e. a function that takes any argument and attempts to parse it to the
given type.
"""
+
def __init__(self, value: Any = None):
...
+
else:
# No structural subtyping in Python < 3.8...
ModelType = Any
@@ -52,7 +56,7 @@ class Type(ABC, Generic[T, N]):
field.
"""
- sql: str = 'TEXT'
+ sql: str = "TEXT"
"""The SQLite column type for the value.
"""
@@ -70,8 +74,7 @@ class Type(ABC, Generic[T, N]):
@property
def null(self) -> N:
- """The value to be exposed when the underlying value is None.
- """
+ """The value to be exposed when the underlying value is None."""
# Note that this default implementation only makes sense for T = N.
# It would be better to implement `null()` only in subclasses, or
# have a field null_type similar to `model_type` and use that here.
@@ -85,9 +88,9 @@ def format(self, value: Union[N, T]) -> str:
value = self.null
# `self.null` might be `None`
if value is None:
- return ''
+ return ""
elif isinstance(value, bytes):
- return value.decode('utf-8', 'ignore')
+ return value.decode("utf-8", "ignore")
else:
return str(value)
@@ -131,7 +134,7 @@ def from_sql(
and the method must handle these in addition.
"""
if isinstance(sql_value, memoryview):
- sql_value = bytes(sql_value).decode('utf-8', 'ignore')
+ sql_value = bytes(sql_value).decode("utf-8", "ignore")
if isinstance(sql_value, str):
return self.parse(sql_value)
else:
@@ -146,6 +149,7 @@ def to_sql(self, model_value: Any) -> Union[None, int, float, str, bytes]:
# Reusable types.
+
class Default(Type[str, None]):
model_type = str
@@ -155,9 +159,9 @@ def null(self):
class BaseInteger(Type[int, N]):
- """A basic integer type.
- """
- sql = 'INTEGER'
+ """A basic integer type."""
+
+ sql = "INTEGER"
query = NumericQuery
model_type = int
@@ -186,11 +190,12 @@ class BasePaddedInt(BaseInteger[N]):
"""An integer field that is formatted with a given number of digits,
padded with zeroes.
"""
+
def __init__(self, digits: int):
self.digits = digits
def format(self, value: Union[int, N]) -> str:
- return '{0:0{1}d}'.format(value or 0, self.digits)
+ return "{0:0{1}d}".format(value or 0, self.digits)
class PaddedInt(BasePaddedInt[int]):
@@ -198,8 +203,8 @@ class PaddedInt(BasePaddedInt[int]):
class NullPaddedInt(BasePaddedInt[None]):
- """Same as `PaddedInt`, but does not normalize `None` to `0`.
- """
+ """Same as `PaddedInt`, but does not normalize `None` to `0`."""
+
@property
def null(self) -> None:
return None
@@ -209,32 +214,35 @@ class ScaledInt(Integer):
"""An integer whose formatting operation scales the number by a
constant and adds a suffix. Good for units with large magnitudes.
"""
- def __init__(self, unit: int, suffix: str = ''):
+
+ def __init__(self, unit: int, suffix: str = ""):
self.unit = unit
self.suffix = suffix
def format(self, value: int) -> str:
- return '{}{}'.format((value or 0) // self.unit, self.suffix)
+ return "{}{}".format((value or 0) // self.unit, self.suffix)
class Id(NullInteger):
"""An integer used as the row id or a foreign key in a SQLite table.
This type is nullable: None values are not translated to zero.
"""
+
@property
def null(self) -> None:
return None
def __init__(self, primary: bool = True):
if primary:
- self.sql = 'INTEGER PRIMARY KEY'
+ self.sql = "INTEGER PRIMARY KEY"
class BaseFloat(Type[float, N]):
"""A basic floating-point type. The `digits` parameter specifies how
many decimal places to use in the human-readable representation.
"""
- sql = 'REAL'
+
+ sql = "REAL"
query = NumericQuery
model_type = float
@@ -242,29 +250,29 @@ def __init__(self, digits: int = 1):
self.digits = digits
def format(self, value: Union[float, N]) -> str:
- return '{0:.{1}f}'.format(value or 0, self.digits)
+ return "{0:.{1}f}".format(value or 0, self.digits)
class Float(BaseFloat[float]):
- """Floating-point type that normalizes `None` to `0.0`.
- """
+ """Floating-point type that normalizes `None` to `0.0`."""
+
@property
def null(self) -> float:
return 0.0
class NullFloat(BaseFloat[None]):
- """Same as `Float`, but does not normalize `None` to `0.0`.
- """
+ """Same as `Float`, but does not normalize `None` to `0.0`."""
+
@property
def null(self) -> None:
return None
class BaseString(Type[T, N]):
- """A Unicode string type.
- """
- sql = 'TEXT'
+ """A Unicode string type."""
+
+ sql = "TEXT"
query = SubstringQuery
def normalize(self, value: Any) -> Union[T, N]:
@@ -275,8 +283,8 @@ def normalize(self, value: Any) -> Union[T, N]:
class String(BaseString[str, Any]):
- """A Unicode string type.
- """
+ """A Unicode string type."""
+
model_type = str
@@ -284,6 +292,7 @@ class DelimitedString(BaseString[List[str], List[str]]):
"""A list of Unicode strings, represented in-database by a single string
containing delimiter-separated values.
"""
+
model_type = list
def __init__(self, delimiter: str):
@@ -302,9 +311,9 @@ def to_sql(self, model_value: List[str]):
class Boolean(Type):
- """A boolean type.
- """
- sql = 'INTEGER'
+ """A boolean type."""
+
+ sql = "INTEGER"
query = BooleanQuery
model_type = bool
@@ -324,7 +333,7 @@ def parse(self, string: str) -> bool:
NULL_FLOAT = NullFloat()
STRING = String()
BOOLEAN = Boolean()
-SEMICOLON_SPACE_DSV = DelimitedString(delimiter='; ')
+SEMICOLON_SPACE_DSV = DelimitedString(delimiter="; ")
# Will set the proper null char in mediafile
-MULTI_VALUE_DSV = DelimitedString(delimiter='\\␀')
+MULTI_VALUE_DSV = DelimitedString(delimiter="\\␀")
diff --git a/beets/importer.py b/beets/importer.py
index b00919404c..f7c6232aa5 100644
--- a/beets/importer.py
+++ b/beets/importer.py
@@ -17,39 +17,40 @@
autotagging music files.
"""
+import itertools
import os
-import re
import pickle
-import itertools
-from collections import defaultdict
-from tempfile import mkdtemp
-from bisect import insort, bisect_left
-from contextlib import contextmanager
+import re
import shutil
import time
-
-from beets import logging
-from beets import autotag
-from beets import library
-from beets import dbcore
-from beets import plugins
-from beets import util
-from beets import config
-from beets.util import pipeline, sorted_walk, ancestry, MoveOperation
-from beets.util import syspath, normpath, displayable_path
+from bisect import bisect_left, insort
+from collections import defaultdict
+from contextlib import contextmanager
from enum import Enum
+from tempfile import mkdtemp
+
import mediafile
-action = Enum('action',
- ['SKIP', 'ASIS', 'TRACKS', 'APPLY', 'ALBUMS', 'RETAG'])
+from beets import autotag, config, dbcore, library, logging, plugins, util
+from beets.util import (
+ MoveOperation,
+ ancestry,
+ displayable_path,
+ normpath,
+ pipeline,
+ sorted_walk,
+ syspath,
+)
+
+action = Enum("action", ["SKIP", "ASIS", "TRACKS", "APPLY", "ALBUMS", "RETAG"])
# The RETAG action represents "don't apply any match, but do record
# new metadata". It's not reachable via the standard command prompt but
# can be used by plugins.
QUEUE_SIZE = 128
SINGLE_ARTIST_THRESH = 0.25
-PROGRESS_KEY = 'tagprogress'
-HISTORY_KEY = 'taghistory'
+PROGRESS_KEY = "tagprogress"
+HISTORY_KEY = "taghistory"
# Usually flexible attributes are preserved (i.e., not updated) during
# reimports. The following two lists (globally) change this behaviour for
# certain fields. To alter these lists only when a specific plugin is in use,
@@ -59,49 +60,56 @@
# def extend_reimport_fresh_fields_item():
# importer.REIMPORT_FRESH_FIELDS_ITEM.extend(['tidal_track_popularity']
# )
-REIMPORT_FRESH_FIELDS_ALBUM = ['data_source']
-REIMPORT_FRESH_FIELDS_ITEM = ['data_source', 'bandcamp_album_id',
- 'spotify_album_id', 'deezer_album_id',
- 'beatport_album_id', 'tidal_album_id']
+REIMPORT_FRESH_FIELDS_ALBUM = ["data_source"]
+REIMPORT_FRESH_FIELDS_ITEM = [
+ "data_source",
+ "bandcamp_album_id",
+ "spotify_album_id",
+ "deezer_album_id",
+ "beatport_album_id",
+ "tidal_album_id",
+]
# Global logger.
-log = logging.getLogger('beets')
+log = logging.getLogger("beets")
class ImportAbort(Exception):
- """Raised when the user aborts the tagging operation.
- """
+ """Raised when the user aborts the tagging operation."""
+
pass
# Utilities.
+
def _open_state():
"""Reads the state file, returning a dictionary."""
try:
- with open(config['statefile'].as_filename(), 'rb') as f:
+ with open(config["statefile"].as_filename(), "rb") as f:
return pickle.load(f)
except Exception as exc:
# The `pickle` module can emit all sorts of exceptions during
# unpickling, including ImportError. We use a catch-all
# exception to avoid enumerating them all (the docs don't even have a
# full list!).
- log.debug('state file could not be read: {0}', exc)
+ log.debug("state file could not be read: {0}", exc)
return {}
def _save_state(state):
"""Writes the state dictionary out to disk."""
try:
- with open(config['statefile'].as_filename(), 'wb') as f:
+ with open(config["statefile"].as_filename(), "wb") as f:
pickle.dump(state, f)
except OSError as exc:
- log.error('state file could not be written: {0}', exc)
+ log.error("state file could not be written: {0}", exc)
# Utilities for reading and writing the beets progress file, which
# allows long tagging tasks to be resumed when they pause (or crash).
+
def progress_read():
state = _open_state()
return state.setdefault(PROGRESS_KEY, {})
@@ -133,8 +141,7 @@ def progress_add(toppath, *paths):
def progress_element(toppath, path):
- """Return whether `path` has been imported in `toppath`.
- """
+ """Return whether `path` has been imported in `toppath`."""
state = progress_read()
if toppath not in state:
return False
@@ -161,6 +168,7 @@ def progress_reset(toppath):
# This keeps track of all directories that were ever imported, which
# allows the importer to only import new stuff.
+
def history_add(paths):
"""Indicate that the import of the album in `paths` is completed and
should not be repeated in incremental imports.
@@ -175,8 +183,7 @@ def history_add(paths):
def history_get():
- """Get the set of completed path tuples in incremental imports.
- """
+ """Get the set of completed path tuples in incremental imports."""
state = _open_state()
if HISTORY_KEY not in state:
return set()
@@ -185,6 +192,7 @@ def history_get():
# Abstract session class.
+
class ImportSession:
"""Controls an import action. Subclasses should implement methods to
communicate with the user or otherwise make decisions.
@@ -225,52 +233,53 @@ def set_config(self, config):
self.config = iconfig
# Incremental and progress are mutually exclusive.
- if iconfig['incremental']:
- iconfig['resume'] = False
+ if iconfig["incremental"]:
+ iconfig["resume"] = False
# When based on a query instead of directories, never
# save progress or try to resume.
if self.query is not None:
- iconfig['resume'] = False
- iconfig['incremental'] = False
+ iconfig["resume"] = False
+ iconfig["incremental"] = False
- if iconfig['reflink']:
- iconfig['reflink'] = iconfig['reflink'] \
- .as_choice(['auto', True, False])
+ if iconfig["reflink"]:
+ iconfig["reflink"] = iconfig["reflink"].as_choice(
+ ["auto", True, False]
+ )
# Copy, move, reflink, link, and hardlink are mutually exclusive.
- if iconfig['move']:
- iconfig['copy'] = False
- iconfig['link'] = False
- iconfig['hardlink'] = False
- iconfig['reflink'] = False
- elif iconfig['link']:
- iconfig['copy'] = False
- iconfig['move'] = False
- iconfig['hardlink'] = False
- iconfig['reflink'] = False
- elif iconfig['hardlink']:
- iconfig['copy'] = False
- iconfig['move'] = False
- iconfig['link'] = False
- iconfig['reflink'] = False
- elif iconfig['reflink']:
- iconfig['copy'] = False
- iconfig['move'] = False
- iconfig['link'] = False
- iconfig['hardlink'] = False
+ if iconfig["move"]:
+ iconfig["copy"] = False
+ iconfig["link"] = False
+ iconfig["hardlink"] = False
+ iconfig["reflink"] = False
+ elif iconfig["link"]:
+ iconfig["copy"] = False
+ iconfig["move"] = False
+ iconfig["hardlink"] = False
+ iconfig["reflink"] = False
+ elif iconfig["hardlink"]:
+ iconfig["copy"] = False
+ iconfig["move"] = False
+ iconfig["link"] = False
+ iconfig["reflink"] = False
+ elif iconfig["reflink"]:
+ iconfig["copy"] = False
+ iconfig["move"] = False
+ iconfig["link"] = False
+ iconfig["hardlink"] = False
# Only delete when copying.
- if not iconfig['copy']:
- iconfig['delete'] = False
+ if not iconfig["copy"]:
+ iconfig["delete"] = False
- self.want_resume = config['resume'].as_choice([True, False, 'ask'])
+ self.want_resume = config["resume"].as_choice([True, False, "ask"])
def tag_log(self, status, paths):
"""Log a message about a given album to the importer log. The status
should reflect the reason the album couldn't be tagged.
"""
- self.logger.info('{0} {1}', status, displayable_path(paths))
+ self.logger.info("{0} {1}", status, displayable_path(paths))
def log_choice(self, task, duplicate=False):
"""Logs the task's current choice if it should be logged. If
@@ -281,17 +290,17 @@ def log_choice(self, task, duplicate=False):
if duplicate:
# Duplicate: log all three choices (skip, keep both, and trump).
if task.should_remove_duplicates:
- self.tag_log('duplicate-replace', paths)
+ self.tag_log("duplicate-replace", paths)
elif task.choice_flag in (action.ASIS, action.APPLY):
- self.tag_log('duplicate-keep', paths)
+ self.tag_log("duplicate-keep", paths)
elif task.choice_flag is (action.SKIP):
- self.tag_log('duplicate-skip', paths)
+ self.tag_log("duplicate-skip", paths)
else:
# Non-duplicate: log "skip" and "asis" choices.
if task.choice_flag is action.ASIS:
- self.tag_log('asis', paths)
+ self.tag_log("asis", paths)
elif task.choice_flag is action.SKIP:
- self.tag_log('skip', paths)
+ self.tag_log("skip", paths)
def should_resume(self, path):
raise NotImplementedError
@@ -306,10 +315,9 @@ def choose_item(self, task):
raise NotImplementedError
def run(self):
- """Run the import task.
- """
- self.logger.info('import started {0}', time.asctime())
- self.set_config(config['import'])
+ """Run the import task."""
+ self.logger.info("import started {0}", time.asctime())
+ self.set_config(config["import"])
# Set up the pipeline.
if self.query is None:
@@ -318,11 +326,10 @@ def run(self):
stages = [query_tasks(self)]
# In pretend mode, just log what would otherwise be imported.
- if self.config['pretend']:
+ if self.config["pretend"]:
stages += [log_files(self)]
else:
- if self.config['group_albums'] and \
- not self.config['singletons']:
+ if self.config["group_albums"] and not self.config["singletons"]:
# Split directory tasks into one task for each album.
stages += [group_albums(self)]
@@ -331,7 +338,7 @@ def run(self):
# import everything as-is. In *both* cases, these stages
# also add the music to the library database, so later
# stages need to read and write data from there.
- if self.config['autotag']:
+ if self.config["autotag"]:
stages += [lookup_candidates(self), user_query(self)]
else:
stages += [import_asis(self)]
@@ -347,9 +354,9 @@ def run(self):
pl = pipeline.Pipeline(stages)
# Run the pipeline.
- plugins.send('import_begin', session=self)
+ plugins.send("import_begin", session=self)
try:
- if config['threaded']:
+ if config["threaded"]:
pl.run_parallel(QUEUE_SIZE)
else:
pl.run_sequential()
@@ -363,18 +370,18 @@ def already_imported(self, toppath, paths):
"""Returns true if the files belonging to this task have already
been imported in a previous session.
"""
- if self.is_resuming(toppath) \
- and all([progress_element(toppath, p) for p in paths]):
+ if self.is_resuming(toppath) and all(
+ [progress_element(toppath, p) for p in paths]
+ ):
return True
- if self.config['incremental'] \
- and tuple(paths) in self.history_dirs:
+ if self.config["incremental"] and tuple(paths) in self.history_dirs:
return True
return False
@property
def history_dirs(self):
- if not hasattr(self, '_history_dirs'):
+ if not hasattr(self, "_history_dirs"):
self._history_dirs = history_get()
return self._history_dirs
@@ -383,18 +390,17 @@ def already_merged(self, paths):
during previous tasks.
"""
for path in paths:
- if path not in self._merged_items \
- and path not in self._merged_dirs:
+ if path not in self._merged_items and path not in self._merged_dirs:
return False
return True
def mark_merged(self, paths):
- """Mark paths and directories as merged for future reimport tasks.
- """
+ """Mark paths and directories as merged for future reimport tasks."""
self._merged_items.update(paths)
- dirs = {os.path.dirname(path)
- if os.path.isfile(syspath(path)) else path
- for path in paths}
+ dirs = {
+ os.path.dirname(path) if os.path.isfile(syspath(path)) else path
+ for path in paths
+ }
self._merged_dirs.update(dirs)
def is_resuming(self, toppath):
@@ -412,10 +418,11 @@ def ask_resume(self, toppath):
"""
if self.want_resume and has_progress(toppath):
# Either accept immediately or prompt for input to decide.
- if self.want_resume is True or \
- self.should_resume(toppath):
- log.warning('Resuming interrupted import of {0}',
- util.displayable_path(toppath))
+ if self.want_resume is True or self.should_resume(toppath):
+ log.warning(
+ "Resuming interrupted import of {0}",
+ util.displayable_path(toppath),
+ )
self._is_resuming[toppath] = True
else:
# Clear progress; we're starting from the top.
@@ -424,11 +431,12 @@ def ask_resume(self, toppath):
# The importer task class.
+
class BaseImportTask:
"""An abstract base class for importer tasks.
Tasks flow through the importer pipeline. Each stage can update
- them. """
+ them."""
def __init__(self, toppath, paths, items):
"""Create a task. The primary fields that define a task are:
@@ -502,8 +510,13 @@ def set_choice(self, choice):
"""
# Not part of the task structure:
assert choice != action.APPLY # Only used internally.
- if choice in (action.SKIP, action.ASIS, action.TRACKS, action.ALBUMS,
- action.RETAG):
+ if choice in (
+ action.SKIP,
+ action.ASIS,
+ action.TRACKS,
+ action.ALBUMS,
+ action.RETAG,
+ ):
self.choice_flag = choice
self.match = None
else:
@@ -518,8 +531,7 @@ def save_progress(self):
progress_add(self.toppath, *self.paths)
def save_history(self):
- """Save the directory in the history for incremental imports.
- """
+ """Save the directory in the history for incremental imports."""
if self.paths:
history_add(self.paths)
@@ -562,9 +574,8 @@ def imported_items(self):
assert False
def apply_metadata(self):
- """Copy metadata from match info to the items.
- """
- if config['import']['from_scratch']:
+ """Copy metadata from match info to the items."""
+ if config["import"]["from_scratch"]:
for item in self.match.mapping:
item.clear()
@@ -578,27 +589,29 @@ def duplicate_items(self, lib):
def remove_duplicates(self, lib):
duplicate_items = self.duplicate_items(lib)
- log.debug('removing {0} old duplicated items', len(duplicate_items))
+ log.debug("removing {0} old duplicated items", len(duplicate_items))
for item in duplicate_items:
item.remove()
if lib.directory in util.ancestry(item.path):
- log.debug('deleting duplicate {0}',
- util.displayable_path(item.path))
+ log.debug(
+ "deleting duplicate {0}", util.displayable_path(item.path)
+ )
util.remove(item.path)
- util.prune_dirs(os.path.dirname(item.path),
- lib.directory)
+ util.prune_dirs(os.path.dirname(item.path), lib.directory)
def set_fields(self, lib):
"""Sets the fields given at CLI or configuration to the specified
values, for both the album and all its items.
"""
items = self.imported_items()
- for field, view in config['import']['set_fields'].items():
+ for field, view in config["import"]["set_fields"].items():
value = view.get()
- log.debug('Set field {1}={2} for {0}',
- displayable_path(self.paths),
- field,
- value)
+ log.debug(
+ "Set field {1}={2} for {0}",
+ displayable_path(self.paths),
+ field,
+ value,
+ )
self.album.set_parse(field, format(self.album, value))
for item in items:
item.set_parse(field, format(item, value))
@@ -608,27 +621,28 @@ def set_fields(self, lib):
self.album.store()
def finalize(self, session):
- """Save progress, clean up files, and emit plugin event.
- """
+ """Save progress, clean up files, and emit plugin event."""
# Update progress.
if session.want_resume:
self.save_progress()
- if session.config['incremental'] and not (
+ if session.config["incremental"] and not (
# Should we skip recording to incremental list?
- self.skip and session.config['incremental_skip_later']
+ self.skip
+ and session.config["incremental_skip_later"]
):
self.save_history()
- self.cleanup(copy=session.config['copy'],
- delete=session.config['delete'],
- move=session.config['move'])
+ self.cleanup(
+ copy=session.config["copy"],
+ delete=session.config["delete"],
+ move=session.config["move"],
+ )
if not self.skip:
self._emit_imported(session.lib)
def cleanup(self, copy=False, delete=False, move=False):
- """Remove and prune imported paths.
- """
+ """Remove and prune imported paths."""
# Do not delete any files or prune directories when skipping.
if self.skip:
return
@@ -650,7 +664,7 @@ def cleanup(self, copy=False, delete=False, move=False):
self.prune(old_path)
def _emit_imported(self, lib):
- plugins.send('album_imported', lib=lib, album=self.album)
+ plugins.send("album_imported", lib=lib, album=self.album)
def handle_created(self, session):
"""Send the `import_task_created` event for this task. Return a list of
@@ -658,7 +672,7 @@ def handle_created(self, session):
list containing only the task itself, but plugins can replace the task
with new ones.
"""
- tasks = plugins.send('import_task_created', session=session, task=self)
+ tasks = plugins.send("import_task_created", session=session, task=self)
if not tasks:
tasks = [self]
else:
@@ -671,8 +685,9 @@ def lookup_candidates(self):
candidate IDs are stored in self.search_ids: if present, the
initial lookup is restricted to only those IDs.
"""
- artist, album, prop = \
- autotag.tag_album(self.items, search_ids=self.search_ids)
+ artist, album, prop = autotag.tag_album(
+ self.items, search_ids=self.search_ids
+ )
self.cur_artist = artist
self.cur_album = album
self.candidates = prop.candidates
@@ -683,20 +698,19 @@ def find_duplicates(self, lib):
album name as the task.
"""
info = self.chosen_info()
- info['albumartist'] = info['artist']
+ info["albumartist"] = info["artist"]
- if info['artist'] is None:
+ if info["artist"] is None:
# As-is import with no artist. Skip check.
return []
# Construct a query to find duplicates with this metadata. We
# use a temporary Album object to generate any computed fields.
tmp_album = library.Album(lib, **info)
- keys = config['import']['duplicate_keys']['album'].as_str_seq()
- dup_query = library.Album.all_fields_query({
- key: tmp_album.get(key)
- for key in keys
- })
+ keys = config["import"]["duplicate_keys"]["album"].as_str_seq()
+ dup_query = library.Album.all_fields_query(
+ {key: tmp_album.get(key) for key in keys}
+ )
# Don't count albums with the same files as duplicates.
task_paths = {i.path for i in self.items if i}
@@ -725,35 +739,36 @@ def align_album_level_fields(self):
plur_albumartist, freq = util.plurality(
[i.albumartist or i.artist for i in self.items]
)
- if freq == len(self.items) or \
- (freq > 1 and
- float(freq) / len(self.items) >= SINGLE_ARTIST_THRESH):
+ if freq == len(self.items) or (
+ freq > 1
+ and float(freq) / len(self.items) >= SINGLE_ARTIST_THRESH
+ ):
# Single-artist album.
- changes['albumartist'] = plur_albumartist
- changes['comp'] = False
+ changes["albumartist"] = plur_albumartist
+ changes["comp"] = False
else:
# VA.
- changes['albumartist'] = config['va_name'].as_str()
- changes['comp'] = True
+ changes["albumartist"] = config["va_name"].as_str()
+ changes["comp"] = True
elif self.choice_flag in (action.APPLY, action.RETAG):
# Applying autotagged metadata. Just get AA from the first
# item.
if not self.items[0].albumartist:
- changes['albumartist'] = self.items[0].artist
+ changes["albumartist"] = self.items[0].artist
if not self.items[0].albumartists:
- changes['albumartists'] = self.items[0].artists
+ changes["albumartists"] = self.items[0].artists
if not self.items[0].mb_albumartistid:
- changes['mb_albumartistid'] = self.items[0].mb_artistid
+ changes["mb_albumartistid"] = self.items[0].mb_artistid
if not self.items[0].mb_albumartistids:
- changes['mb_albumartistids'] = self.items[0].mb_artistids
+ changes["mb_albumartistids"] = self.items[0].mb_artistids
# Apply new metadata.
for item in self.items:
item.update(changes)
def manipulate_files(self, operation=None, write=False, session=None):
- """ Copy, move, link, hardlink or reflink (depending on `operation`)
+ """Copy, move, link, hardlink or reflink (depending on `operation`)
the files as well as write metadata.
`operation` should be an instance of `util.MoveOperation`.
@@ -771,9 +786,11 @@ def manipulate_files(self, operation=None, write=False, session=None):
# move in-library files. (Out-of-library files are
# copied/moved as usual).
old_path = item.path
- if (operation != MoveOperation.MOVE
- and self.replaced_items[item]
- and session.lib.directory in util.ancestry(old_path)):
+ if (
+ operation != MoveOperation.MOVE
+ and self.replaced_items[item]
+ and session.lib.directory in util.ancestry(old_path)
+ ):
item.move()
# We moved the item, so remove the
# now-nonexistent file from old_paths.
@@ -790,17 +807,16 @@ def manipulate_files(self, operation=None, write=False, session=None):
for item in self.imported_items():
item.store()
- plugins.send('import_task_files', session=session, task=self)
+ plugins.send("import_task_files", session=session, task=self)
def add(self, lib):
- """Add the items as an album to the library and remove replaced items.
- """
+ """Add the items as an album to the library and remove replaced items."""
self.align_album_level_fields()
with lib.transaction():
self.record_replaced(lib)
self.remove_replaced(lib)
self.album = lib.add_album(self.imported_items())
- if 'data_source' in self.imported_items()[0]:
+ if "data_source" in self.imported_items()[0]:
self.album.data_source = self.imported_items()[0].data_source
self.reimport_metadata(lib)
@@ -812,13 +828,15 @@ def record_replaced(self, lib):
self.replaced_albums = defaultdict(list)
replaced_album_ids = set()
for item in self.imported_items():
- dup_items = list(lib.items(
- dbcore.query.BytesQuery('path', item.path)
- ))
+ dup_items = list(
+ lib.items(dbcore.query.BytesQuery("path", item.path))
+ )
self.replaced_items[item] = dup_items
for dup_item in dup_items:
- if (not dup_item.album_id or
- dup_item.album_id in replaced_album_ids):
+ if (
+ not dup_item.album_id
+ or dup_item.album_id in replaced_album_ids
+ ):
continue
replaced_album = dup_item._cached_album
if replaced_album:
@@ -829,24 +847,31 @@ def reimport_metadata(self, lib):
"""For reimports, preserves metadata for reimported items and
albums.
"""
+
def _reduce_and_log(new_obj, existing_fields, overwrite_keys):
"""Some flexible attributes should be overwritten (rather than
preserved) on reimports; Copies existing_fields, logs and removes
entries that should not be preserved and returns a dict containing
those fields left to actually be preserved.
"""
- noun = 'album' if isinstance(new_obj, library.Album) else 'item'
+ noun = "album" if isinstance(new_obj, library.Album) else "item"
existing_fields = dict(existing_fields)
- overwritten_fields = [k for k in existing_fields
- if k in overwrite_keys
- and new_obj.get(k)
- and existing_fields.get(k) != new_obj.get(k)]
+ overwritten_fields = [
+ k
+ for k in existing_fields
+ if k in overwrite_keys
+ and new_obj.get(k)
+ and existing_fields.get(k) != new_obj.get(k)
+ ]
if overwritten_fields:
log.debug(
- 'Reimported {} {}. Not preserving flexible attributes {}. '
- 'Path: {}',
- noun, new_obj.id, overwritten_fields,
- displayable_path(new_obj.path))
+ "Reimported {} {}. Not preserving flexible attributes {}. "
+ "Path: {}",
+ noun,
+ new_obj.id,
+ overwritten_fields,
+ displayable_path(new_obj.path),
+ )
for key in overwritten_fields:
del existing_fields[key]
return existing_fields
@@ -854,9 +879,11 @@ def _reduce_and_log(new_obj, existing_fields, overwrite_keys):
if self.is_album:
replaced_album = self.replaced_albums.get(self.album.path)
if replaced_album:
- album_fields = _reduce_and_log(self.album,
- replaced_album._values_flex,
- REIMPORT_FRESH_FIELDS_ALBUM)
+ album_fields = _reduce_and_log(
+ self.album,
+ replaced_album._values_flex,
+ REIMPORT_FRESH_FIELDS_ALBUM,
+ )
self.album.added = replaced_album.added
self.album.update(album_fields)
self.album.artpath = replaced_album.artpath
@@ -864,12 +891,16 @@ def _reduce_and_log(new_obj, existing_fields, overwrite_keys):
log.debug(
"Reimported album {}. Preserving attribute ['added']. "
"Path: {}",
- self.album.id, displayable_path(self.album.path))
+ self.album.id,
+ displayable_path(self.album.path),
+ )
log.debug(
- 'Reimported album {}. Preserving flexible attributes {}. '
- 'Path: {}',
- self.album.id, list(album_fields.keys()),
- displayable_path(self.album.path))
+ "Reimported album {}. Preserving flexible attributes {}. "
+ "Path: {}",
+ self.album.id,
+ list(album_fields.keys()),
+ displayable_path(self.album.path),
+ )
for item in self.imported_items():
dup_items = self.replaced_items[item]
@@ -879,15 +910,20 @@ def _reduce_and_log(new_obj, existing_fields, overwrite_keys):
log.debug(
"Reimported item {}. Preserving attribute ['added']. "
"Path: {}",
- item.id, displayable_path(item.path))
- item_fields = _reduce_and_log(item, dup_item._values_flex,
- REIMPORT_FRESH_FIELDS_ITEM)
+ item.id,
+ displayable_path(item.path),
+ )
+ item_fields = _reduce_and_log(
+ item, dup_item._values_flex, REIMPORT_FRESH_FIELDS_ITEM
+ )
item.update(item_fields)
log.debug(
- 'Reimported item {}. Preserving flexible attributes {}. '
- 'Path: {}',
- item.id, list(item_fields.keys()),
- displayable_path(item.path))
+ "Reimported item {}. Preserving flexible attributes {}. "
+ "Path: {}",
+ item.id,
+ list(item_fields.keys()),
+ displayable_path(item.path),
+ )
item.store()
def remove_replaced(self, lib):
@@ -896,23 +932,26 @@ def remove_replaced(self, lib):
"""
for item in self.imported_items():
for dup_item in self.replaced_items[item]:
- log.debug('Replacing item {0}: {1}',
- dup_item.id, displayable_path(item.path))
+ log.debug(
+ "Replacing item {0}: {1}",
+ dup_item.id,
+ displayable_path(item.path),
+ )
dup_item.remove()
- log.debug('{0} of {1} items replaced',
- sum(bool(l) for l in self.replaced_items.values()),
- len(self.imported_items()))
+ log.debug(
+ "{0} of {1} items replaced",
+ sum(bool(l) for l in self.replaced_items.values()),
+ len(self.imported_items()),
+ )
def choose_match(self, session):
- """Ask the session which match should apply and apply it.
- """
+ """Ask the session which match should apply and apply it."""
choice = session.choose_match(self)
self.set_choice(choice)
session.log_choice(self)
def reload(self):
- """Reload albums and items from the database.
- """
+ """Reload albums and items from the database."""
for item in self.imported_items():
item.load()
self.album.load()
@@ -927,14 +966,15 @@ def prune(self, filename):
call when the file in question may not have been removed.
"""
if self.toppath and not os.path.exists(syspath(filename)):
- util.prune_dirs(os.path.dirname(filename),
- self.toppath,
- clutter=config['clutter'].as_str_seq())
+ util.prune_dirs(
+ os.path.dirname(filename),
+ self.toppath,
+ clutter=config["clutter"].as_str_seq(),
+ )
class SingletonImportTask(ImportTask):
- """ImportTask for a single track that is not associated to an album.
- """
+ """ImportTask for a single track that is not associated to an album."""
def __init__(self, toppath, item):
super().__init__(toppath, [item.path], [item])
@@ -962,7 +1002,7 @@ def apply_metadata(self):
def _emit_imported(self, lib):
for item in self.imported_items():
- plugins.send('item_imported', lib=lib, item=item)
+ plugins.send("item_imported", lib=lib, item=item)
def lookup_candidates(self):
prop = autotag.tag_item(self.item, search_ids=self.search_ids)
@@ -978,11 +1018,10 @@ def find_duplicates(self, lib):
# Query for existing items using the same metadata. We use a
# temporary `Item` object to generate any computed fields.
tmp_item = library.Item(lib, **info)
- keys = config['import']['duplicate_keys']['item'].as_str_seq()
- dup_query = library.Album.all_fields_query({
- key: tmp_item.get(key)
- for key in keys
- })
+ keys = config["import"]["duplicate_keys"]["item"].as_str_seq()
+ dup_query = library.Album.all_fields_query(
+ {key: tmp_item.get(key) for key in keys}
+ )
found_items = []
for other_item in lib.items(dup_query):
@@ -1004,8 +1043,7 @@ def infer_album_fields(self):
raise NotImplementedError
def choose_match(self, session):
- """Ask the session which match should apply and apply it.
- """
+ """Ask the session which match should apply and apply it."""
choice = session.choose_item(self)
self.set_choice(choice)
session.log_choice(self)
@@ -1017,12 +1055,14 @@ def set_fields(self, lib):
"""Sets the fields given at CLI or configuration to the specified
values, for the singleton item.
"""
- for field, view in config['import']['set_fields'].items():
+ for field, view in config["import"]["set_fields"].items():
value = view.get()
- log.debug('Set field {1}={2} for {0}',
- displayable_path(self.paths),
- field,
- value)
+ log.debug(
+ "Set field {1}={2} for {0}",
+ displayable_path(self.paths),
+ field,
+ value,
+ )
self.item.set_parse(field, format(self.item, value))
self.item.store()
@@ -1109,20 +1149,22 @@ def handlers(cls):
handled by `ArchiveClass`. `ArchiveClass` is a class that
implements the same interface as `tarfile.TarFile`.
"""
- if not hasattr(cls, '_handlers'):
+ if not hasattr(cls, "_handlers"):
cls._handlers = []
- from zipfile import is_zipfile, ZipFile
+ from zipfile import ZipFile, is_zipfile
+
cls._handlers.append((is_zipfile, ZipFile))
import tarfile
+
cls._handlers.append((tarfile.is_tarfile, tarfile.open))
try:
- from rarfile import is_rarfile, RarFile
+ from rarfile import RarFile, is_rarfile
except ImportError:
pass
else:
cls._handlers.append((is_rarfile, RarFile))
try:
- from py7zr import is_7zfile, SevenZipFile
+ from py7zr import SevenZipFile, is_7zfile
except ImportError:
pass
else:
@@ -1131,11 +1173,12 @@ def handlers(cls):
return cls._handlers
def cleanup(self, **kwargs):
- """Removes the temporary directory the archive was extracted to.
- """
+ """Removes the temporary directory the archive was extracted to."""
if self.extracted:
- log.debug('Removing extracted directory: {0}',
- displayable_path(self.toppath))
+ log.debug(
+ "Removing extracted directory: {0}",
+ displayable_path(self.toppath),
+ )
shutil.rmtree(syspath(self.toppath))
def extract(self):
@@ -1147,7 +1190,7 @@ def extract(self):
break
extract_to = mkdtemp()
- archive = handler_class(util.py3_path(self.toppath), mode='r')
+ archive = handler_class(util.py3_path(self.toppath), mode="r")
try:
archive.extractall(extract_to)
@@ -1208,7 +1251,7 @@ def tasks(self):
# Search for music in the directory.
for dirs, paths in self.paths():
- if self.session.config['singletons']:
+ if self.session.config["singletons"]:
for path in paths:
tasks = self._create(self.singleton(path))
yield from tasks
@@ -1251,7 +1294,7 @@ def paths(self):
"""
if not os.path.isdir(syspath(self.toppath)):
yield [self.toppath], [self.toppath]
- elif self.session.config['flat']:
+ elif self.session.config["flat"]:
paths = []
for dirs, paths_in_dir in albums_in_dir(self.toppath):
paths += paths_in_dir
@@ -1261,11 +1304,11 @@ def paths(self):
yield dirs, paths
def singleton(self, path):
- """Return a `SingletonImportTask` for the music file.
- """
+ """Return a `SingletonImportTask` for the music file."""
if self.session.already_imported(self.toppath, [path]):
- log.debug('Skipping previously-imported path: {0}',
- displayable_path(path))
+ log.debug(
+ "Skipping previously-imported path: {0}", displayable_path(path)
+ )
self.skipped += 1
return None
@@ -1288,8 +1331,9 @@ def album(self, paths, dirs=None):
dirs = list({os.path.dirname(p) for p in paths})
if self.session.already_imported(self.toppath, dirs):
- log.debug('Skipping previously-imported path: {0}',
- displayable_path(dirs))
+ log.debug(
+ "Skipping previously-imported path: {0}", displayable_path(dirs)
+ )
self.skipped += 1
return None
@@ -1316,24 +1360,24 @@ def unarchive(self):
"""
assert self.is_archive
- if not (self.session.config['move'] or
- self.session.config['copy']):
- log.warning("Archive importing requires either "
- "'copy' or 'move' to be enabled.")
+ if not (self.session.config["move"] or self.session.config["copy"]):
+ log.warning(
+ "Archive importing requires either "
+ "'copy' or 'move' to be enabled."
+ )
return
- log.debug('Extracting archive: {0}',
- displayable_path(self.toppath))
+ log.debug("Extracting archive: {0}", displayable_path(self.toppath))
archive_task = ArchiveImportTask(self.toppath)
try:
archive_task.extract()
except Exception as exc:
- log.error('extraction failed: {0}', exc)
+ log.error("extraction failed: {0}", exc)
return
# Now read albums from the extracted directory.
self.toppath = archive_task.toppath
- log.debug('Archive extracted to: {0}', self.toppath)
+ log.debug("Archive extracted to: {0}", self.toppath)
return archive_task
def read_item(self, path):
@@ -1349,14 +1393,14 @@ def read_item(self, path):
# Silently ignore non-music files.
pass
elif isinstance(exc.reason, mediafile.UnreadableFileError):
- log.warning('unreadable file: {0}', displayable_path(path))
+ log.warning("unreadable file: {0}", displayable_path(path))
else:
- log.error('error reading {0}: {1}',
- displayable_path(path), exc)
+ log.error("error reading {0}: {1}", displayable_path(path), exc)
# Pipeline utilities
+
def _freshen_items(items):
# Clear IDs from re-tagged items so they appear "fresh" when
# we add them back to the library.
@@ -1378,6 +1422,7 @@ def _extend_pipeline(tasks, *stages):
# Full-album pipeline stages.
+
def read_tasks(session):
"""A generator yielding all the albums (as ImportTask objects) found
in the user-specified list of paths. In the case of a singleton
@@ -1394,12 +1439,11 @@ def read_tasks(session):
skipped += task_factory.skipped
if not task_factory.imported:
- log.warning('No files imported from {0}',
- displayable_path(toppath))
+ log.warning("No files imported from {0}", displayable_path(toppath))
# Show skipped directories (due to incremental/resume).
if skipped:
- log.info('Skipped {0} paths.', skipped)
+ log.info("Skipped {0} paths.", skipped)
def query_tasks(session):
@@ -1407,7 +1451,7 @@ def query_tasks(session):
Instead of finding files from the filesystem, a query is used to
match items from the library.
"""
- if session.config['singletons']:
+ if session.config["singletons"]:
# Search for items.
for item in session.lib.items(session.query):
task = SingletonImportTask(None, item)
@@ -1417,8 +1461,12 @@ def query_tasks(session):
else:
# Search for albums.
for album in session.lib.albums(session.query):
- log.debug('yielding album {0}: {1} - {2}',
- album.id, album.albumartist, album.album)
+ log.debug(
+ "yielding album {0}: {1} - {2}",
+ album.id,
+ album.albumartist,
+ album.album,
+ )
items = list(album.items())
_freshen_items(items)
@@ -1439,12 +1487,12 @@ def lookup_candidates(session, task):
# abstraction.
return
- plugins.send('import_task_start', session=session, task=task)
- log.debug('Looking up: {0}', displayable_path(task.paths))
+ plugins.send("import_task_start", session=session, task=task)
+ log.debug("Looking up: {0}", displayable_path(task.paths))
# Restrict the initial lookup to IDs specified by the user via the -m
# option. Currently all the IDs are passed onto the tasks directly.
- task.search_ids = session.config['search_ids'].as_str_seq()
+ task.search_ids = session.config["search_ids"].as_str_seq()
task.lookup_candidates()
@@ -1471,7 +1519,7 @@ def user_query(session, task):
# Ask the user for a choice.
task.choose_match(session)
- plugins.send('import_task_choice', session=session, task=task)
+ plugins.send("import_task_choice", session=session, task=task)
# As-tracks: transition to singleton workflow.
if task.choice_flag is action.TRACKS:
@@ -1482,16 +1530,18 @@ def emitter(task):
yield from task.handle_created(session)
yield SentinelImportTask(task.toppath, task.paths)
- return _extend_pipeline(emitter(task),
- lookup_candidates(session),
- user_query(session))
+ return _extend_pipeline(
+ emitter(task), lookup_candidates(session), user_query(session)
+ )
# As albums: group items by albums and create task for each album
if task.choice_flag is action.ALBUMS:
- return _extend_pipeline([task],
- group_albums(session),
- lookup_candidates(session),
- user_query(session))
+ return _extend_pipeline(
+ [task],
+ group_albums(session),
+ lookup_candidates(session),
+ user_query(session),
+ )
resolve_duplicates(session, task)
@@ -1507,12 +1557,13 @@ def emitter(task):
# Record merged paths in the session so they are not reimported
session.mark_merged(duplicate_paths)
- merged_task = ImportTask(None, task.paths + duplicate_paths,
- task.items + duplicate_items)
+ merged_task = ImportTask(
+ None, task.paths + duplicate_paths, task.items + duplicate_items
+ )
- return _extend_pipeline([merged_task],
- lookup_candidates(session),
- user_query(session))
+ return _extend_pipeline(
+ [merged_task], lookup_candidates(session), user_query(session)
+ )
apply_choice(session, task)
return task
@@ -1525,30 +1576,32 @@ def resolve_duplicates(session, task):
if task.choice_flag in (action.ASIS, action.APPLY, action.RETAG):
found_duplicates = task.find_duplicates(session.lib)
if found_duplicates:
- log.debug('found duplicates: {}'.format(
- [o.id for o in found_duplicates]
- ))
+ log.debug(
+ "found duplicates: {}".format([o.id for o in found_duplicates])
+ )
# Get the default action to follow from config.
- duplicate_action = config['import']['duplicate_action'].as_choice({
- 'skip': 's',
- 'keep': 'k',
- 'remove': 'r',
- 'merge': 'm',
- 'ask': 'a',
- })
- log.debug('default action for duplicates: {0}', duplicate_action)
-
- if duplicate_action == 's':
+ duplicate_action = config["import"]["duplicate_action"].as_choice(
+ {
+ "skip": "s",
+ "keep": "k",
+ "remove": "r",
+ "merge": "m",
+ "ask": "a",
+ }
+ )
+ log.debug("default action for duplicates: {0}", duplicate_action)
+
+ if duplicate_action == "s":
# Skip new.
task.set_choice(action.SKIP)
- elif duplicate_action == 'k':
+ elif duplicate_action == "k":
# Keep both. Do nothing; leave the choice intact.
pass
- elif duplicate_action == 'r':
+ elif duplicate_action == "r":
# Remove old.
task.should_remove_duplicates = True
- elif duplicate_action == 'm':
+ elif duplicate_action == "m":
# Merge duplicates together
task.should_merge_duplicates = True
else:
@@ -1568,7 +1621,7 @@ def import_asis(session, task):
if task.skip:
return
- log.info('{}', displayable_path(task.paths))
+ log.info("{}", displayable_path(task.paths))
task.set_choice(action.ASIS)
apply_choice(session, task)
@@ -1583,7 +1636,7 @@ def apply_choice(session, task):
# Change metadata.
if task.apply:
task.apply_metadata()
- plugins.send('import_task_apply', session=session, task=task)
+ plugins.send("import_task_apply", session=session, task=task)
task.add(session.lib)
@@ -1592,7 +1645,7 @@ def apply_choice(session, task):
# NOTE: This cannot be done before the ``task.add()`` call above,
# because then the ``ImportTask`` won't have an `album` for which
# it can set the fields.
- if config['import']['set_fields']:
+ if config["import"]["set_fields"]:
task.set_fields(session.lib)
@@ -1623,22 +1676,22 @@ def manipulate_files(session, task):
if task.should_remove_duplicates:
task.remove_duplicates(session.lib)
- if session.config['move']:
+ if session.config["move"]:
operation = MoveOperation.MOVE
- elif session.config['copy']:
+ elif session.config["copy"]:
operation = MoveOperation.COPY
- elif session.config['link']:
+ elif session.config["link"]:
operation = MoveOperation.LINK
- elif session.config['hardlink']:
+ elif session.config["hardlink"]:
operation = MoveOperation.HARDLINK
- elif session.config['reflink']:
+ elif session.config["reflink"]:
operation = MoveOperation.REFLINK
else:
operation = None
task.manipulate_files(
operation,
- write=session.config['write'],
+ write=session.config["write"],
session=session,
)
@@ -1648,14 +1701,13 @@ def manipulate_files(session, task):
@pipeline.stage
def log_files(session, task):
- """A coroutine (pipeline stage) to log each file to be imported.
- """
+ """A coroutine (pipeline stage) to log each file to be imported."""
if isinstance(task, SingletonImportTask):
- log.info('Singleton: {0}', displayable_path(task.item['path']))
+ log.info("Singleton: {0}", displayable_path(task.item["path"]))
elif task.items:
- log.info('Album: {0}', displayable_path(task.paths[0]))
+ log.info("Album: {0}", displayable_path(task.paths[0]))
for item in task.items:
- log.info(' {0}', displayable_path(item['path']))
+ log.info(" {0}", displayable_path(item["path"]))
def group_albums(session):
@@ -1665,6 +1717,7 @@ def group_albums(session):
Groups are identified using their artist and album fields. The
pipeline stage emits new album tasks for each discovered group.
"""
+
def group(item):
return (item.albumartist or item.artist, item.album)
@@ -1677,16 +1730,15 @@ def group(item):
sorted_items = sorted(task.items, key=group)
for _, items in itertools.groupby(sorted_items, group):
items = list(items)
- task = ImportTask(task.toppath, [i.path for i in items],
- items)
+ task = ImportTask(task.toppath, [i.path for i in items], items)
tasks += task.handle_created(session)
tasks.append(SentinelImportTask(task.toppath, task.paths))
task = pipeline.multiple(tasks)
-MULTIDISC_MARKERS = (br'dis[ck]', br'cd')
-MULTIDISC_PAT_FMT = br'^(.*%s[\W_]*)\d'
+MULTIDISC_MARKERS = (rb"dis[ck]", rb"cd")
+MULTIDISC_PAT_FMT = rb"^(.*%s[\W_]*)\d"
def is_subdir_of_any_in_list(path, dirs):
@@ -1704,21 +1756,21 @@ def albums_in_dir(path):
containing any media files is an album.
"""
collapse_pat = collapse_paths = collapse_items = None
- ignore = config['ignore'].as_str_seq()
- ignore_hidden = config['ignore_hidden'].get(bool)
+ ignore = config["ignore"].as_str_seq()
+ ignore_hidden = config["ignore_hidden"].get(bool)
- for root, dirs, files in sorted_walk(path, ignore=ignore,
- ignore_hidden=ignore_hidden,
- logger=log):
+ for root, dirs, files in sorted_walk(
+ path, ignore=ignore, ignore_hidden=ignore_hidden, logger=log
+ ):
items = [os.path.join(root, f) for f in files]
# If we're currently collapsing the constituent directories in a
# multi-disc album, check whether we should continue collapsing
# and add the current directory. If so, just add the directory
# and move on to the next directory. If not, stop collapsing.
if collapse_paths:
- if (is_subdir_of_any_in_list(root, collapse_paths)) or \
- (collapse_pat and
- collapse_pat.match(os.path.basename(root))):
+ if (is_subdir_of_any_in_list(root, collapse_paths)) or (
+ collapse_pat and collapse_pat.match(os.path.basename(root))
+ ):
# Still collapsing.
collapse_paths.append(root)
collapse_items += items
@@ -1738,7 +1790,7 @@ def albums_in_dir(path):
start_collapsing = False
for marker in MULTIDISC_MARKERS:
# We're using replace on %s due to lack of .format() on bytestrings
- p = MULTIDISC_PAT_FMT.replace(b'%s', marker)
+ p = MULTIDISC_PAT_FMT.replace(b"%s", marker)
marker_pat = re.compile(p, re.I)
match = marker_pat.match(os.path.basename(root))
@@ -1756,8 +1808,7 @@ def albums_in_dir(path):
if match:
match_group = re.escape(match.group(1))
subdir_pat = re.compile(
- b''.join([b'^', match_group, br'\d']),
- re.I
+ b"".join([b"^", match_group, rb"\d"]), re.I
)
else:
start_collapsing = False
@@ -1779,8 +1830,7 @@ def albums_in_dir(path):
# Set the current pattern to match directories with the same
# prefix as this one, followed by a digit.
collapse_pat = re.compile(
- b''.join([b'^', re.escape(match.group(1)), br'\d']),
- re.I
+ b"".join([b"^", re.escape(match.group(1)), rb"\d"]), re.I
)
break
diff --git a/beets/library.py b/beets/library.py
index ccd431b857..d0019fb809 100644
--- a/beets/library.py
+++ b/beets/library.py
@@ -16,34 +16,39 @@
"""
import os
-import sys
-import unicodedata
-import time
import re
-import string
import shlex
+import string
+import sys
+import time
+import unicodedata
-from beets import logging
from mediafile import MediaFile, UnreadableFileError
-from beets import plugins
-from beets import util
-from beets.util import bytestring_path, syspath, normpath, samefile, \
- MoveOperation, lazy_property
-from beets.util.functemplate import template, Template
-from beets import dbcore
-from beets.dbcore import types
+
import beets
+from beets import dbcore, logging, plugins, util
+from beets.dbcore import types
+from beets.util import (
+ MoveOperation,
+ bytestring_path,
+ lazy_property,
+ normpath,
+ samefile,
+ syspath,
+)
+from beets.util.functemplate import Template, template
# To use the SQLite "blob" type, it doesn't suffice to provide a byte
# string; SQLite treats that as encoded text. Wrapping it in a
# `memoryview` tells it that we actually mean non-text data.
BLOB_TYPE = memoryview
-log = logging.getLogger('beets')
+log = logging.getLogger("beets")
# Library-specific query types.
+
class SingletonQuery(dbcore.FieldQuery):
"""This query is responsible for the 'singleton' lookup.
@@ -54,8 +59,9 @@ class SingletonQuery(dbcore.FieldQuery):
Using util.str2bool ensures that lookups like singleton:true, singleton:1
and singleton:false, singleton:0 are handled consistently.
"""
+
def __new__(cls, field, value, *args, **kwargs):
- query = dbcore.query.NoneQuery('album_id')
+ query = dbcore.query.NoneQuery("album_id")
if util.str2bool(value):
return query
return dbcore.query.NotQuery(query)
@@ -68,6 +74,7 @@ class PathQuery(dbcore.FieldQuery):
default, the behavior depends on the OS: case-insensitive on Windows
and case-sensitive otherwise.
"""
+
# For tests
force_implicit_query_detection = False
@@ -101,7 +108,7 @@ def __init__(self, field, pattern, fast=True, case_sensitive=None):
# Match the path as a single file.
self.file_path = path
# As a directory (prefix).
- self.dir_path = os.path.join(path, b'')
+ self.dir_path = os.path.join(path, b"")
@classmethod
def is_path_query(cls, query_part):
@@ -109,14 +116,15 @@ def is_path_query(cls, query_part):
Condition: separator precedes colon and the file exists.
"""
- colon = query_part.find(':')
+ colon = query_part.find(":")
if colon != -1:
query_part = query_part[:colon]
# Test both `sep` and `altsep` (i.e., both slash and backslash on
# Windows).
- if not (os.sep in query_part
- or (os.altsep and os.altsep in query_part)):
+ if not (
+ os.sep in query_part or (os.altsep and os.altsep in query_part)
+ ):
return False
if cls.force_implicit_query_detection:
@@ -132,32 +140,36 @@ def col_clause(self):
dir_blob = BLOB_TYPE(self.dir_path)
if self.case_sensitive:
- query_part = '({0} = ?) || (substr({0}, 1, ?) = ?)'
+ query_part = "({0} = ?) || (substr({0}, 1, ?) = ?)"
else:
- query_part = '(BYTELOWER({0}) = BYTELOWER(?)) || \
- (substr(BYTELOWER({0}), 1, ?) = BYTELOWER(?))'
+ query_part = "(BYTELOWER({0}) = BYTELOWER(?)) || \
+ (substr(BYTELOWER({0}), 1, ?) = BYTELOWER(?))"
- return query_part.format(self.field), \
- (file_blob, len(dir_blob), dir_blob)
+ return query_part.format(self.field), (
+ file_blob,
+ len(dir_blob),
+ dir_blob,
+ )
# Library-specific field types.
+
class DateType(types.Float):
# TODO representation should be `datetime` object
# TODO distinguish between date and time types
query = dbcore.query.DateQuery
def format(self, value):
- return time.strftime(beets.config['time_format'].as_str(),
- time.localtime(value or 0))
+ return time.strftime(
+ beets.config["time_format"].as_str(), time.localtime(value or 0)
+ )
def parse(self, string):
try:
# Try a formatted date string.
return time.mktime(
- time.strptime(string,
- beets.config['time_format'].as_str())
+ time.strptime(string, beets.config["time_format"].as_str())
)
except ValueError:
# Fall back to a plain timestamp number.
@@ -174,7 +186,7 @@ class PathType(types.Type):
the Unix filesystem abstraction.
"""
- sql = 'BLOB'
+ sql = "BLOB"
query = PathQuery
model_type = bytes
@@ -190,7 +202,7 @@ def null(self):
if self.nullable:
return None
else:
- return b''
+ return b""
def format(self, value):
return util.displayable_path(value)
@@ -224,12 +236,13 @@ class MusicalKey(types.String):
The standard format is C, Cm, C#, C#m, etc.
"""
+
ENHARMONIC = {
- r'db': 'c#',
- r'eb': 'd#',
- r'gb': 'f#',
- r'ab': 'g#',
- r'bb': 'a#',
+ r"db": "c#",
+ r"eb": "d#",
+ r"gb": "f#",
+ r"ab": "g#",
+ r"bb": "a#",
}
null = None
@@ -238,8 +251,8 @@ def parse(self, key):
key = key.lower()
for flat, sharp in self.ENHARMONIC.items():
key = re.sub(flat, sharp, key)
- key = re.sub(r'[\W\s]+minor', 'm', key)
- key = re.sub(r'[\W\s]+major', '', key)
+ key = re.sub(r"[\W\s]+minor", "m", key)
+ key = re.sub(r"[\W\s]+major", "", key)
return key.capitalize()
def normalize(self, key):
@@ -251,10 +264,11 @@ def normalize(self, key):
class DurationType(types.Float):
"""Human-friendly (M:SS) representation of a time interval."""
+
query = dbcore.query.DurationQuery
def format(self, value):
- if not beets.config['format_raw_length'].get(bool):
+ if not beets.config["format_raw_length"].get(bool):
return beets.ui.human_seconds_short(value or 0.0)
else:
return value
@@ -273,6 +287,7 @@ def parse(self, string):
# Library-specific sort types.
+
class SmartArtistSort(dbcore.query.Sort):
"""Sort by artist (either album artist or track artist),
prioritizing the sort field over the raw field.
@@ -285,30 +300,37 @@ def __init__(self, model_cls, ascending=True, case_insensitive=True):
def order_clause(self):
order = "ASC" if self.ascending else "DESC"
- field = 'albumartist' if self.album else 'artist'
- collate = 'COLLATE NOCASE' if self.case_insensitive else ''
- return ('(CASE {0}_sort WHEN NULL THEN {0} '
- 'WHEN "" THEN {0} '
- 'ELSE {0}_sort END) {1} {2}').format(field, collate, order)
+ field = "albumartist" if self.album else "artist"
+ collate = "COLLATE NOCASE" if self.case_insensitive else ""
+ return (
+ "(CASE {0}_sort WHEN NULL THEN {0} "
+ 'WHEN "" THEN {0} '
+ "ELSE {0}_sort END) {1} {2}"
+ ).format(field, collate, order)
def sort(self, objs):
if self.album:
+
def field(a):
return a.albumartist_sort or a.albumartist
+
else:
+
def field(i):
return i.artist_sort or i.artist
if self.case_insensitive:
+
def key(x):
return field(x).lower()
+
else:
key = field
return sorted(objs, key=key, reverse=not self.ascending)
# Special path format key.
-PF_KEY_DEFAULT = 'default'
+PF_KEY_DEFAULT = "default"
# Exceptions.
@@ -339,18 +361,19 @@ class ReadError(FileOperationError):
"""An error while reading a file (i.e. in `Item.read`)."""
def __str__(self):
- return 'error reading ' + str(super())
+ return "error reading " + str(super())
class WriteError(FileOperationError):
"""An error while writing a file (i.e. in `Item.write`)."""
def __str__(self):
- return 'error writing ' + str(super())
+ return "error writing " + str(super())
# Item and Album model classes.
+
class LibModel(dbcore.Model):
"""Shared concrete functionality for Items and Albums."""
@@ -364,15 +387,15 @@ def _template_funcs(self):
def store(self, fields=None):
super().store(fields)
- plugins.send('database_change', lib=self._db, model=self)
+ plugins.send("database_change", lib=self._db, model=self)
def remove(self):
super().remove()
- plugins.send('database_change', lib=self._db, model=self)
+ plugins.send("database_change", lib=self._db, model=self)
def add(self, lib=None):
super().add(lib)
- plugins.send('database_change', lib=self._db, model=self)
+ plugins.send("database_change", lib=self._db, model=self)
def __format__(self, spec):
if not spec:
@@ -384,7 +407,7 @@ def __str__(self):
return format(self)
def __bytes__(self):
- return self.__str__().encode('utf-8')
+ return self.__str__().encode("utf-8")
class FormattedItemMapping(dbcore.db.FormattedMapping):
@@ -393,13 +416,12 @@ class FormattedItemMapping(dbcore.db.FormattedMapping):
Album-level fields take precedence if `for_path` is true.
"""
- ALL_KEYS = '*'
+ ALL_KEYS = "*"
def __init__(self, item, included_keys=ALL_KEYS, for_path=False):
# We treat album and item keys specially here,
# so exclude transitive album keys from the model's keys.
- super().__init__(item, included_keys=[],
- for_path=for_path)
+ super().__init__(item, included_keys=[], for_path=for_path)
self.included_keys = included_keys
if included_keys == self.ALL_KEYS:
# Performance note: this triggers a database query.
@@ -419,8 +441,10 @@ def album_keys(self):
if self.included_keys == self.ALL_KEYS:
# Performance note: this triggers a database query.
for key in self.album.keys(computed=True):
- if key in Album.item_keys \
- or key not in self.item._fields.keys():
+ if (
+ key in Album.item_keys
+ or key not in self.item._fields.keys()
+ ):
album_keys.append(key)
else:
album_keys = self.included_keys
@@ -456,10 +480,10 @@ def __getitem__(self, key):
# This is helpful in path formats when the album artist is unset
# on as-is imports.
try:
- if key == 'artist' and not value:
- return self._get('albumartist')
- elif key == 'albumartist' and not value:
- return self._get('artist')
+ if key == "artist" and not value:
+ return self._get("albumartist")
+ elif key == "albumartist" and not value:
+ return self._get("artist")
except KeyError:
pass
@@ -474,117 +498,123 @@ def __len__(self):
class Item(LibModel):
"""Represent a song or track."""
- _table = 'items'
- _flex_table = 'item_attributes'
+
+ _table = "items"
+ _flex_table = "item_attributes"
_fields = {
- 'id': types.PRIMARY_ID,
- 'path': PathType(),
- 'album_id': types.FOREIGN_ID,
-
- 'title': types.STRING,
- 'artist': types.STRING,
- 'artists': types.MULTI_VALUE_DSV,
- 'artists_ids': types.MULTI_VALUE_DSV,
- 'artist_sort': types.STRING,
- 'artists_sort': types.MULTI_VALUE_DSV,
- 'artist_credit': types.STRING,
- 'artists_credit': types.MULTI_VALUE_DSV,
- 'remixer': types.STRING,
- 'album': types.STRING,
- 'albumartist': types.STRING,
- 'albumartists': types.MULTI_VALUE_DSV,
- 'albumartist_sort': types.STRING,
- 'albumartists_sort': types.MULTI_VALUE_DSV,
- 'albumartist_credit': types.STRING,
- 'albumartists_credit': types.MULTI_VALUE_DSV,
- 'genre': types.STRING,
- 'style': types.STRING,
- 'discogs_albumid': types.INTEGER,
- 'discogs_artistid': types.INTEGER,
- 'discogs_labelid': types.INTEGER,
- 'lyricist': types.STRING,
- 'composer': types.STRING,
- 'composer_sort': types.STRING,
- 'work': types.STRING,
- 'mb_workid': types.STRING,
- 'work_disambig': types.STRING,
- 'arranger': types.STRING,
- 'grouping': types.STRING,
- 'year': types.PaddedInt(4),
- 'month': types.PaddedInt(2),
- 'day': types.PaddedInt(2),
- 'track': types.PaddedInt(2),
- 'tracktotal': types.PaddedInt(2),
- 'disc': types.PaddedInt(2),
- 'disctotal': types.PaddedInt(2),
- 'lyrics': types.STRING,
- 'comments': types.STRING,
- 'bpm': types.INTEGER,
- 'comp': types.BOOLEAN,
- 'mb_trackid': types.STRING,
- 'mb_albumid': types.STRING,
- 'mb_artistid': types.STRING,
- 'mb_artistids': types.MULTI_VALUE_DSV,
- 'mb_albumartistid': types.STRING,
- 'mb_albumartistids': types.MULTI_VALUE_DSV,
- 'mb_releasetrackid': types.STRING,
- 'trackdisambig': types.STRING,
- 'albumtype': types.STRING,
- 'albumtypes': types.SEMICOLON_SPACE_DSV,
- 'label': types.STRING,
- 'acoustid_fingerprint': types.STRING,
- 'acoustid_id': types.STRING,
- 'mb_releasegroupid': types.STRING,
- 'release_group_title': types.STRING,
- 'asin': types.STRING,
- 'isrc': types.STRING,
- 'catalognum': types.STRING,
- 'script': types.STRING,
- 'language': types.STRING,
- 'country': types.STRING,
- 'albumstatus': types.STRING,
- 'media': types.STRING,
- 'albumdisambig': types.STRING,
- 'releasegroupdisambig': types.STRING,
- 'disctitle': types.STRING,
- 'encoder': types.STRING,
- 'rg_track_gain': types.NULL_FLOAT,
- 'rg_track_peak': types.NULL_FLOAT,
- 'rg_album_gain': types.NULL_FLOAT,
- 'rg_album_peak': types.NULL_FLOAT,
- 'r128_track_gain': types.NULL_FLOAT,
- 'r128_album_gain': types.NULL_FLOAT,
- 'original_year': types.PaddedInt(4),
- 'original_month': types.PaddedInt(2),
- 'original_day': types.PaddedInt(2),
- 'initial_key': MusicalKey(),
-
- 'length': DurationType(),
- 'bitrate': types.ScaledInt(1000, 'kbps'),
- 'bitrate_mode': types.STRING,
- 'encoder_info': types.STRING,
- 'encoder_settings': types.STRING,
- 'format': types.STRING,
- 'samplerate': types.ScaledInt(1000, 'kHz'),
- 'bitdepth': types.INTEGER,
- 'channels': types.INTEGER,
- 'mtime': DateType(),
- 'added': DateType(),
+ "id": types.PRIMARY_ID,
+ "path": PathType(),
+ "album_id": types.FOREIGN_ID,
+ "title": types.STRING,
+ "artist": types.STRING,
+ "artists": types.MULTI_VALUE_DSV,
+ "artists_ids": types.MULTI_VALUE_DSV,
+ "artist_sort": types.STRING,
+ "artists_sort": types.MULTI_VALUE_DSV,
+ "artist_credit": types.STRING,
+ "artists_credit": types.MULTI_VALUE_DSV,
+ "remixer": types.STRING,
+ "album": types.STRING,
+ "albumartist": types.STRING,
+ "albumartists": types.MULTI_VALUE_DSV,
+ "albumartist_sort": types.STRING,
+ "albumartists_sort": types.MULTI_VALUE_DSV,
+ "albumartist_credit": types.STRING,
+ "albumartists_credit": types.MULTI_VALUE_DSV,
+ "genre": types.STRING,
+ "style": types.STRING,
+ "discogs_albumid": types.INTEGER,
+ "discogs_artistid": types.INTEGER,
+ "discogs_labelid": types.INTEGER,
+ "lyricist": types.STRING,
+ "composer": types.STRING,
+ "composer_sort": types.STRING,
+ "work": types.STRING,
+ "mb_workid": types.STRING,
+ "work_disambig": types.STRING,
+ "arranger": types.STRING,
+ "grouping": types.STRING,
+ "year": types.PaddedInt(4),
+ "month": types.PaddedInt(2),
+ "day": types.PaddedInt(2),
+ "track": types.PaddedInt(2),
+ "tracktotal": types.PaddedInt(2),
+ "disc": types.PaddedInt(2),
+ "disctotal": types.PaddedInt(2),
+ "lyrics": types.STRING,
+ "comments": types.STRING,
+ "bpm": types.INTEGER,
+ "comp": types.BOOLEAN,
+ "mb_trackid": types.STRING,
+ "mb_albumid": types.STRING,
+ "mb_artistid": types.STRING,
+ "mb_artistids": types.MULTI_VALUE_DSV,
+ "mb_albumartistid": types.STRING,
+ "mb_albumartistids": types.MULTI_VALUE_DSV,
+ "mb_releasetrackid": types.STRING,
+ "trackdisambig": types.STRING,
+ "albumtype": types.STRING,
+ "albumtypes": types.SEMICOLON_SPACE_DSV,
+ "label": types.STRING,
+ "acoustid_fingerprint": types.STRING,
+ "acoustid_id": types.STRING,
+ "mb_releasegroupid": types.STRING,
+ "release_group_title": types.STRING,
+ "asin": types.STRING,
+ "isrc": types.STRING,
+ "catalognum": types.STRING,
+ "script": types.STRING,
+ "language": types.STRING,
+ "country": types.STRING,
+ "albumstatus": types.STRING,
+ "media": types.STRING,
+ "albumdisambig": types.STRING,
+ "releasegroupdisambig": types.STRING,
+ "disctitle": types.STRING,
+ "encoder": types.STRING,
+ "rg_track_gain": types.NULL_FLOAT,
+ "rg_track_peak": types.NULL_FLOAT,
+ "rg_album_gain": types.NULL_FLOAT,
+ "rg_album_peak": types.NULL_FLOAT,
+ "r128_track_gain": types.NULL_FLOAT,
+ "r128_album_gain": types.NULL_FLOAT,
+ "original_year": types.PaddedInt(4),
+ "original_month": types.PaddedInt(2),
+ "original_day": types.PaddedInt(2),
+ "initial_key": MusicalKey(),
+ "length": DurationType(),
+ "bitrate": types.ScaledInt(1000, "kbps"),
+ "bitrate_mode": types.STRING,
+ "encoder_info": types.STRING,
+ "encoder_settings": types.STRING,
+ "format": types.STRING,
+ "samplerate": types.ScaledInt(1000, "kHz"),
+ "bitdepth": types.INTEGER,
+ "channels": types.INTEGER,
+ "mtime": DateType(),
+ "added": DateType(),
}
- _search_fields = ('artist', 'title', 'comments',
- 'album', 'albumartist', 'genre')
+ _search_fields = (
+ "artist",
+ "title",
+ "comments",
+ "album",
+ "albumartist",
+ "genre",
+ )
_types = {
- 'data_source': types.STRING,
+ "data_source": types.STRING,
}
# Set of item fields that are backed by `MediaFile` fields.
# Any kind of field (fixed, flexible, and computed) may be a media
# field. Only these fields are read from disk in `read` and written in
# `write`.
- _media_fields = set(MediaFile.readable_fields()) \
- .intersection(_fields.keys())
+ _media_fields = set(MediaFile.readable_fields()).intersection(
+ _fields.keys()
+ )
# Set of item fields that are backed by *writable* `MediaFile` tag
# fields.
@@ -594,11 +624,11 @@ class Item(LibModel):
_formatter = FormattedItemMapping
- _sorts = {'artist': SmartArtistSort}
+ _sorts = {"artist": SmartArtistSort}
- _queries = {'singleton': SingletonQuery}
+ _queries = {"singleton": SingletonQuery}
- _format_config_key = 'format_item'
+ _format_config_key = "format_item"
# Cached album object. Read-only.
__album = None
@@ -626,8 +656,8 @@ def _cached_album(self, album):
@classmethod
def _getters(cls):
getters = plugins.item_field_getters()
- getters['singleton'] = lambda i: i.album_id is None
- getters['filesize'] = Item.try_filesize # In bytes.
+ getters["singleton"] = lambda i: i.album_id is None
+ getters["filesize"] = Item.try_filesize # In bytes.
return getters
@classmethod
@@ -642,12 +672,12 @@ def from_path(cls, path):
def __setitem__(self, key, value):
"""Set the item's value for a standard field or a flexattr."""
# Encode unicode paths and read buffers.
- if key == 'path':
+ if key == "path":
if isinstance(value, str):
value = bytestring_path(value)
elif isinstance(value, BLOB_TYPE):
value = bytes(value)
- elif key == 'album_id':
+ elif key == "album_id":
self._cached_album = None
changed = super()._setitem(key, value)
@@ -672,10 +702,12 @@ def __repr__(self):
# This must not use `with_album=True`, because that might access
# the database. When debugging, that is not guaranteed to succeed, and
# can even deadlock due to the database lock.
- return '{}({})'.format(
+ return "{}({})".format(
type(self).__name__,
- ', '.join('{}={!r}'.format(k, self[k])
- for k in self.keys(with_album=False)),
+ ", ".join(
+ "{}={!r}".format(k, self[k])
+ for k in self.keys(with_album=False)
+ ),
)
def keys(self, computed=False, with_album=True):
@@ -709,8 +741,8 @@ def update(self, values):
If mtime is specified, it is not reset (as it might otherwise be).
"""
super().update(values)
- if self.mtime == 0 and 'mtime' in values:
- self.mtime = values['mtime']
+ if self.mtime == 0 and "mtime" in values:
+ self.mtime = values["mtime"]
def clear(self):
"""Set all key/value pairs to None."""
@@ -782,15 +814,16 @@ def write(self, path=None, tags=None, id3v23=None):
path = normpath(path)
if id3v23 is None:
- id3v23 = beets.config['id3v23'].get(bool)
+ id3v23 = beets.config["id3v23"].get(bool)
# Get the data to write to the file.
item_tags = dict(self)
- item_tags = {k: v for k, v in item_tags.items()
- if k in self._media_fields} # Only write media fields.
+ item_tags = {
+ k: v for k, v in item_tags.items() if k in self._media_fields
+ } # Only write media fields.
if tags is not None:
item_tags.update(tags)
- plugins.send('write', item=self, path=path, tags=item_tags)
+ plugins.send("write", item=self, path=path, tags=item_tags)
# Open the file.
try:
@@ -808,7 +841,7 @@ def write(self, path=None, tags=None, id3v23=None):
# The file has a new mtime.
if path == self.path:
self.mtime = self.current_mtime()
- plugins.send('after_write', item=self, path=path)
+ plugins.send("after_write", item=self, path=path)
def try_write(self, *args, **kwargs):
"""Call `write()` but catch and log `FileOperationError`
@@ -840,8 +873,10 @@ def try_sync(self, write, move, with_album=True):
if move:
# Check whether this file is inside the library directory.
if self._db and self._db.directory in util.ancestry(self.path):
- log.debug('moving {0} to synchronize path',
- util.displayable_path(self.path))
+ log.debug(
+ "moving {0} to synchronize path",
+ util.displayable_path(self.path),
+ )
self.move(with_album=with_album)
self.store()
@@ -858,33 +893,43 @@ def move_file(self, dest, operation=MoveOperation.MOVE):
if not util.samefile(self.path, dest):
dest = util.unique_path(dest)
if operation == MoveOperation.MOVE:
- plugins.send("before_item_moved", item=self, source=self.path,
- destination=dest)
+ plugins.send(
+ "before_item_moved",
+ item=self,
+ source=self.path,
+ destination=dest,
+ )
util.move(self.path, dest)
- plugins.send("item_moved", item=self, source=self.path,
- destination=dest)
+ plugins.send(
+ "item_moved", item=self, source=self.path, destination=dest
+ )
elif operation == MoveOperation.COPY:
util.copy(self.path, dest)
- plugins.send("item_copied", item=self, source=self.path,
- destination=dest)
+ plugins.send(
+ "item_copied", item=self, source=self.path, destination=dest
+ )
elif operation == MoveOperation.LINK:
util.link(self.path, dest)
- plugins.send("item_linked", item=self, source=self.path,
- destination=dest)
+ plugins.send(
+ "item_linked", item=self, source=self.path, destination=dest
+ )
elif operation == MoveOperation.HARDLINK:
util.hardlink(self.path, dest)
- plugins.send("item_hardlinked", item=self, source=self.path,
- destination=dest)
+ plugins.send(
+ "item_hardlinked", item=self, source=self.path, destination=dest
+ )
elif operation == MoveOperation.REFLINK:
util.reflink(self.path, dest, fallback=False)
- plugins.send("item_reflinked", item=self, source=self.path,
- destination=dest)
+ plugins.send(
+ "item_reflinked", item=self, source=self.path, destination=dest
+ )
elif operation == MoveOperation.REFLINK_AUTO:
util.reflink(self.path, dest, fallback=True)
- plugins.send("item_reflinked", item=self, source=self.path,
- destination=dest)
+ plugins.send(
+ "item_reflinked", item=self, source=self.path, destination=dest
+ )
else:
- assert False, 'unknown MoveOperation'
+ assert False, "unknown MoveOperation"
# Either copying or moving succeeded, so update the stored path.
self.path = dest
@@ -903,7 +948,7 @@ def try_filesize(self):
try:
return os.path.getsize(syspath(self.path))
except (OSError, Exception) as exc:
- log.warning('could not get filesize: {0}', exc)
+ log.warning("could not get filesize: {0}", exc)
return 0
# Model methods.
@@ -925,7 +970,7 @@ def remove(self, delete=False, with_album=True):
album.remove(delete, False)
# Send a 'item_removed' signal to plugins
- plugins.send('item_removed', item=self)
+ plugins.send("item_removed", item=self)
# Delete the associated file.
if delete:
@@ -934,8 +979,13 @@ def remove(self, delete=False, with_album=True):
self._db._memotable = {}
- def move(self, operation=MoveOperation.MOVE, basedir=None,
- with_album=True, store=True):
+ def move(
+ self,
+ operation=MoveOperation.MOVE,
+ basedir=None,
+ with_album=True,
+ store=True,
+ ):
"""Move the item to its designated location within the library
directory (provided by destination()).
@@ -983,8 +1033,14 @@ def move(self, operation=MoveOperation.MOVE, basedir=None,
# Templating.
- def destination(self, fragment=False, basedir=None, platform=None,
- path_formats=None, replacements=None):
+ def destination(
+ self,
+ fragment=False,
+ basedir=None,
+ platform=None,
+ path_formats=None,
+ replacements=None,
+ ):
"""Return the path in the library directory designated for the
item (i.e., where the file ought to be).
@@ -1026,34 +1082,36 @@ def destination(self, fragment=False, basedir=None, platform=None,
subpath = self.evaluate_template(subpath_tmpl, True)
# Prepare path for output: normalize Unicode characters.
- if platform == 'darwin':
- subpath = unicodedata.normalize('NFD', subpath)
+ if platform == "darwin":
+ subpath = unicodedata.normalize("NFD", subpath)
else:
- subpath = unicodedata.normalize('NFC', subpath)
+ subpath = unicodedata.normalize("NFC", subpath)
- if beets.config['asciify_paths']:
+ if beets.config["asciify_paths"]:
subpath = util.asciify_path(
- subpath,
- beets.config['path_sep_replace'].as_str()
+ subpath, beets.config["path_sep_replace"].as_str()
)
- maxlen = beets.config['max_filename_length'].get(int)
+ maxlen = beets.config["max_filename_length"].get(int)
if not maxlen:
# When zero, try to determine from filesystem.
maxlen = util.max_filename_length(self._db.directory)
subpath, fellback = util.legalize_path(
- subpath, replacements, maxlen,
- os.path.splitext(self.path)[1], fragment
+ subpath,
+ replacements,
+ maxlen,
+ os.path.splitext(self.path)[1],
+ fragment,
)
if fellback:
# Print an error message if legalization fell back to
# default replacements because of the maximum length.
log.warning(
- 'Fell back to default replacements when naming '
- 'file {}. Configure replacements to avoid lengthening '
- 'the filename.',
- subpath
+ "Fell back to default replacements when naming "
+ "file {}. Configure replacements to avoid lengthening "
+ "the filename.",
+ subpath,
)
if fragment:
@@ -1068,118 +1126,118 @@ class Album(LibModel):
Reflects the library's "albums" table, including album art.
"""
- _table = 'albums'
- _flex_table = 'album_attributes'
+
+ _table = "albums"
+ _flex_table = "album_attributes"
_always_dirty = True
_fields = {
- 'id': types.PRIMARY_ID,
- 'artpath': PathType(True),
- 'added': DateType(),
-
- 'albumartist': types.STRING,
- 'albumartist_sort': types.STRING,
- 'albumartist_credit': types.STRING,
- 'albumartists': types.MULTI_VALUE_DSV,
- 'albumartists_sort': types.MULTI_VALUE_DSV,
- 'albumartists_credit': types.MULTI_VALUE_DSV,
- 'album': types.STRING,
- 'genre': types.STRING,
- 'style': types.STRING,
- 'discogs_albumid': types.INTEGER,
- 'discogs_artistid': types.INTEGER,
- 'discogs_labelid': types.INTEGER,
- 'year': types.PaddedInt(4),
- 'month': types.PaddedInt(2),
- 'day': types.PaddedInt(2),
- 'disctotal': types.PaddedInt(2),
- 'comp': types.BOOLEAN,
- 'mb_albumid': types.STRING,
- 'mb_albumartistid': types.STRING,
- 'albumtype': types.STRING,
- 'albumtypes': types.SEMICOLON_SPACE_DSV,
- 'label': types.STRING,
- 'mb_releasegroupid': types.STRING,
- 'release_group_title': types.STRING,
- 'asin': types.STRING,
- 'catalognum': types.STRING,
- 'script': types.STRING,
- 'language': types.STRING,
- 'country': types.STRING,
- 'albumstatus': types.STRING,
- 'albumdisambig': types.STRING,
- 'releasegroupdisambig': types.STRING,
- 'rg_album_gain': types.NULL_FLOAT,
- 'rg_album_peak': types.NULL_FLOAT,
- 'r128_album_gain': types.NULL_FLOAT,
- 'original_year': types.PaddedInt(4),
- 'original_month': types.PaddedInt(2),
- 'original_day': types.PaddedInt(2),
+ "id": types.PRIMARY_ID,
+ "artpath": PathType(True),
+ "added": DateType(),
+ "albumartist": types.STRING,
+ "albumartist_sort": types.STRING,
+ "albumartist_credit": types.STRING,
+ "albumartists": types.MULTI_VALUE_DSV,
+ "albumartists_sort": types.MULTI_VALUE_DSV,
+ "albumartists_credit": types.MULTI_VALUE_DSV,
+ "album": types.STRING,
+ "genre": types.STRING,
+ "style": types.STRING,
+ "discogs_albumid": types.INTEGER,
+ "discogs_artistid": types.INTEGER,
+ "discogs_labelid": types.INTEGER,
+ "year": types.PaddedInt(4),
+ "month": types.PaddedInt(2),
+ "day": types.PaddedInt(2),
+ "disctotal": types.PaddedInt(2),
+ "comp": types.BOOLEAN,
+ "mb_albumid": types.STRING,
+ "mb_albumartistid": types.STRING,
+ "albumtype": types.STRING,
+ "albumtypes": types.SEMICOLON_SPACE_DSV,
+ "label": types.STRING,
+ "mb_releasegroupid": types.STRING,
+ "release_group_title": types.STRING,
+ "asin": types.STRING,
+ "catalognum": types.STRING,
+ "script": types.STRING,
+ "language": types.STRING,
+ "country": types.STRING,
+ "albumstatus": types.STRING,
+ "albumdisambig": types.STRING,
+ "releasegroupdisambig": types.STRING,
+ "rg_album_gain": types.NULL_FLOAT,
+ "rg_album_peak": types.NULL_FLOAT,
+ "r128_album_gain": types.NULL_FLOAT,
+ "original_year": types.PaddedInt(4),
+ "original_month": types.PaddedInt(2),
+ "original_day": types.PaddedInt(2),
}
- _search_fields = ('album', 'albumartist', 'genre')
+ _search_fields = ("album", "albumartist", "genre")
_types = {
- 'path': PathType(),
- 'data_source': types.STRING,
+ "path": PathType(),
+ "data_source": types.STRING,
}
_sorts = {
- 'albumartist': SmartArtistSort,
- 'artist': SmartArtistSort,
+ "albumartist": SmartArtistSort,
+ "artist": SmartArtistSort,
}
# List of keys that are set on an album's items.
item_keys = [
- 'added',
- 'albumartist',
- 'albumartists',
- 'albumartist_sort',
- 'albumartists_sort',
- 'albumartist_credit',
- 'albumartists_credit',
- 'album',
- 'genre',
- 'style',
- 'discogs_albumid',
- 'discogs_artistid',
- 'discogs_labelid',
- 'year',
- 'month',
- 'day',
- 'disctotal',
- 'comp',
- 'mb_albumid',
- 'mb_albumartistid',
- 'albumtype',
- 'albumtypes',
- 'label',
- 'mb_releasegroupid',
- 'asin',
- 'catalognum',
- 'script',
- 'language',
- 'country',
- 'albumstatus',
- 'albumdisambig',
- 'releasegroupdisambig',
- 'release_group_title',
- 'rg_album_gain',
- 'rg_album_peak',
- 'r128_album_gain',
- 'original_year',
- 'original_month',
- 'original_day',
+ "added",
+ "albumartist",
+ "albumartists",
+ "albumartist_sort",
+ "albumartists_sort",
+ "albumartist_credit",
+ "albumartists_credit",
+ "album",
+ "genre",
+ "style",
+ "discogs_albumid",
+ "discogs_artistid",
+ "discogs_labelid",
+ "year",
+ "month",
+ "day",
+ "disctotal",
+ "comp",
+ "mb_albumid",
+ "mb_albumartistid",
+ "albumtype",
+ "albumtypes",
+ "label",
+ "mb_releasegroupid",
+ "asin",
+ "catalognum",
+ "script",
+ "language",
+ "country",
+ "albumstatus",
+ "albumdisambig",
+ "releasegroupdisambig",
+ "release_group_title",
+ "rg_album_gain",
+ "rg_album_peak",
+ "r128_album_gain",
+ "original_year",
+ "original_month",
+ "original_day",
]
- _format_config_key = 'format_album'
+ _format_config_key = "format_album"
@classmethod
def _getters(cls):
# In addition to plugin-provided computed fields, also expose
# the album's directory as `path`.
getters = plugins.album_field_getters()
- getters['path'] = Album.item_dir
- getters['albumtotal'] = Album._albumtotal
+ getters["path"] = Album.item_dir
+ getters["albumtotal"] = Album._albumtotal
return getters
def items(self):
@@ -1191,7 +1249,7 @@ def items(self):
Since :meth:`Album.items` predates these methods, and is
likely to be used by plugins, we keep this interface as-is.
"""
- return self._db.items(dbcore.MatchQuery('album_id', self.id))
+ return self._db.items(dbcore.MatchQuery("album_id", self.id))
def remove(self, delete=False, with_items=True):
"""Remove this album and all its associated items from the
@@ -1206,7 +1264,7 @@ def remove(self, delete=False, with_items=True):
super().remove()
# Send a 'album_removed' signal to plugins
- plugins.send('album_removed', album=self)
+ plugins.send("album_removed", album=self)
# Delete art file.
if delete:
@@ -1231,8 +1289,10 @@ def move_art(self, operation=MoveOperation.MOVE):
return
if not os.path.exists(syspath(old_art)):
- log.error('removing reference to missing album art file {}',
- util.displayable_path(old_art))
+ log.error(
+ "removing reference to missing album art file {}",
+ util.displayable_path(old_art),
+ )
self.artpath = None
return
@@ -1241,9 +1301,11 @@ def move_art(self, operation=MoveOperation.MOVE):
return
new_art = util.unique_path(new_art)
- log.debug('moving album art {0} to {1}',
- util.displayable_path(old_art),
- util.displayable_path(new_art))
+ log.debug(
+ "moving album art {0} to {1}",
+ util.displayable_path(old_art),
+ util.displayable_path(new_art),
+ )
if operation == MoveOperation.MOVE:
util.move(old_art, new_art)
util.prune_dirs(os.path.dirname(old_art), self._db.directory)
@@ -1258,7 +1320,7 @@ def move_art(self, operation=MoveOperation.MOVE):
elif operation == MoveOperation.REFLINK_AUTO:
util.reflink(old_art, new_art, fallback=True)
else:
- assert False, 'unknown MoveOperation'
+ assert False, "unknown MoveOperation"
self.artpath = new_art
def move(self, operation=MoveOperation.MOVE, basedir=None, store=True):
@@ -1284,8 +1346,7 @@ def move(self, operation=MoveOperation.MOVE, basedir=None, store=True):
# Move items.
items = list(self.items())
for item in items:
- item.move(operation, basedir=basedir, with_album=False,
- store=store)
+ item.move(operation, basedir=basedir, with_album=False, store=store)
# Move art.
self.move_art(operation)
@@ -1298,12 +1359,12 @@ def item_dir(self):
"""
item = self.items().get()
if not item:
- raise ValueError('empty album for album id %d' % self.id)
+ raise ValueError("empty album for album id %d" % self.id)
return os.path.dirname(item.path)
def _albumtotal(self):
"""Return the total number of tracks on all discs on the album."""
- if self.disctotal == 1 or not beets.config['per_disc_numbering']:
+ if self.disctotal == 1 or not beets.config["per_disc_numbering"]:
return self.items()[0].tracktotal
counted = []
@@ -1335,16 +1396,15 @@ def art_destination(self, image, item_dir=None):
image = bytestring_path(image)
item_dir = item_dir or self.item_dir()
- filename_tmpl = template(
- beets.config['art_filename'].as_str())
+ filename_tmpl = template(beets.config["art_filename"].as_str())
subpath = self.evaluate_template(filename_tmpl, True)
- if beets.config['asciify_paths']:
+ if beets.config["asciify_paths"]:
subpath = util.asciify_path(
- subpath,
- beets.config['path_sep_replace'].as_str()
+ subpath, beets.config["path_sep_replace"].as_str()
)
- subpath = util.sanitize_path(subpath,
- replacements=self._db.replacements)
+ subpath = util.sanitize_path(
+ subpath, replacements=self._db.replacements
+ )
subpath = bytestring_path(subpath)
_, ext = os.path.splitext(image)
@@ -1382,7 +1442,7 @@ def set_art(self, path, copy=True):
util.move(path, artdest)
self.artpath = artdest
- plugins.send('art_set', album=self)
+ plugins.send("art_set", album=self)
def store(self, fields=None, inherit=True):
"""Update the database with the album information.
@@ -1403,7 +1463,7 @@ def store(self, fields=None, inherit=True):
track_updates[key] = self[key]
elif key not in self: # is a fixed or a flexible attribute
track_deletes.add(key)
- elif key != 'id': # is a flexible attribute
+ elif key != "id": # is a flexible attribute
track_updates[key] = self[key]
with self._db.transaction():
@@ -1436,6 +1496,7 @@ def try_sync(self, write, move, inherit=True):
# Query construction helpers.
+
def parse_query_parts(parts, model_cls):
"""Given a beets query string as a list of components, return the
`Query` and `Sort` they represent.
@@ -1445,9 +1506,9 @@ def parse_query_parts(parts, model_cls):
"""
# Get query types and their prefix characters.
prefixes = {
- ':': dbcore.query.RegexpQuery,
- '=~': dbcore.query.StringQuery,
- '=': dbcore.query.MatchQuery,
+ ":": dbcore.query.RegexpQuery,
+ "=~": dbcore.query.StringQuery,
+ "=": dbcore.query.MatchQuery,
}
prefixes.update(plugins.queries())
@@ -1455,7 +1516,7 @@ def parse_query_parts(parts, model_cls):
# containing path separators (/).
parts = [f"path:{s}" if PathQuery.is_path_query(s) else s for s in parts]
- case_insensitive = beets.config['sort_case_insensitive'].get(bool)
+ case_insensitive = beets.config["sort_case_insensitive"].get(bool)
return dbcore.parse_sorted_query(
model_cls, parts, prefixes, case_insensitive
@@ -1478,7 +1539,7 @@ def parse_query_string(s, model_cls):
def _sqlite_bytelower(bytestring):
- """ A custom ``bytelower`` sqlite function so we can compare
+ """A custom ``bytelower`` sqlite function so we can compare
bytestrings in a semi case insensitive fashion.
This is to work around sqlite builds are that compiled with
@@ -1490,16 +1551,20 @@ def _sqlite_bytelower(bytestring):
# The Library: interface to the database.
+
class Library(dbcore.Database):
"""A database of music containing songs and albums."""
+
_models = (Item, Album)
- def __init__(self, path='library.blb',
- directory='~/Music',
- path_formats=((PF_KEY_DEFAULT,
- '$artist/$album/$track $title'),),
- replacements=None):
- timeout = beets.config['timeout'].as_number()
+ def __init__(
+ self,
+ path="library.blb",
+ directory="~/Music",
+ path_formats=((PF_KEY_DEFAULT, "$artist/$album/$track $title"),),
+ replacements=None,
+ ):
+ timeout = beets.config["timeout"].as_number()
super().__init__(path, timeout=timeout)
self.directory = bytestring_path(normpath(directory))
@@ -1510,7 +1575,7 @@ def __init__(self, path='library.blb',
def _create_connection(self):
conn = super()._create_connection()
- conn.create_function('bytelower', 1, _sqlite_bytelower)
+ conn.create_function("bytelower", 1, _sqlite_bytelower)
return conn
# Adding objects to the database.
@@ -1533,7 +1598,7 @@ def add_album(self, items):
be empty.
"""
if not items:
- raise ValueError('need at least one item')
+ raise ValueError("need at least one item")
# Create the album structure using metadata from the first item.
values = {key: items[0][key] for key in Album.item_keys}
@@ -1575,21 +1640,21 @@ def _fetch(self, model_cls, query, sort=None):
if parsed_sort and not isinstance(parsed_sort, dbcore.query.NullSort):
sort = parsed_sort
- return super()._fetch(
- model_cls, query, sort
- )
+ return super()._fetch(model_cls, query, sort)
@staticmethod
def get_default_album_sort():
"""Get a :class:`Sort` object for albums from the config option."""
return dbcore.sort_from_strings(
- Album, beets.config['sort_album'].as_str_seq())
+ Album, beets.config["sort_album"].as_str_seq()
+ )
@staticmethod
def get_default_item_sort():
"""Get a :class:`Sort` object for items from the config option."""
return dbcore.sort_from_strings(
- Item, beets.config['sort_item'].as_str_seq())
+ Item, beets.config["sort_item"].as_str_seq()
+ )
def albums(self, query=None, sort=None):
"""Get :class:`Album` objects matching the query."""
@@ -1625,6 +1690,7 @@ def get_album(self, item_or_id):
# Default path template resources.
+
def _int_arg(s):
"""Convert a string argument to an integer for use in a template
function.
@@ -1642,7 +1708,8 @@ class DefaultTemplateFunctions:
additional context to the functions -- specifically, the Item being
evaluated.
"""
- _prefix = 'tmpl_'
+
+ _prefix = "tmpl_"
def __init__(self, item=None, lib=None):
"""Parametrize the functions.
@@ -1662,7 +1729,7 @@ def functions(self):
"""
out = {}
for key in self._func_names:
- out[key[len(self._prefix):]] = getattr(self, key)
+ out[key[len(self._prefix) :]] = getattr(self, key)
return out
@staticmethod
@@ -1683,15 +1750,15 @@ def tmpl_title(s):
@staticmethod
def tmpl_left(s, chars):
"""Get the leftmost characters of a string."""
- return s[0:_int_arg(chars)]
+ return s[0 : _int_arg(chars)]
@staticmethod
def tmpl_right(s, chars):
"""Get the rightmost characters of a string."""
- return s[-_int_arg(chars):]
+ return s[-_int_arg(chars) :]
@staticmethod
- def tmpl_if(condition, trueval, falseval=''):
+ def tmpl_if(condition, trueval, falseval=""):
"""If ``condition`` is nonempty and nonzero, emit ``trueval``;
otherwise, emit ``falseval`` (if provided).
"""
@@ -1711,12 +1778,12 @@ def tmpl_if(condition, trueval, falseval=''):
@staticmethod
def tmpl_asciify(s):
"""Translate non-ASCII characters to their ASCII equivalents."""
- return util.asciify_path(s, beets.config['path_sep_replace'].as_str())
+ return util.asciify_path(s, beets.config["path_sep_replace"].as_str())
@staticmethod
def tmpl_time(s, fmt):
"""Format a time value using `strftime`."""
- cur_fmt = beets.config['time_format'].as_str()
+ cur_fmt = beets.config["time_format"].as_str()
return time.strftime(fmt, time.strptime(s, cur_fmt))
def tmpl_aunique(self, keys=None, disam=None, bracket=None):
@@ -1732,7 +1799,7 @@ def tmpl_aunique(self, keys=None, disam=None, bracket=None):
"""
# Fast paths: no album, no item or library, or memoized value.
if not self.item or not self.lib:
- return ''
+ return ""
if isinstance(self.item, Item):
album_id = self.item.album_id
@@ -1740,9 +1807,9 @@ def tmpl_aunique(self, keys=None, disam=None, bracket=None):
album_id = self.item.id
if album_id is None:
- return ''
+ return ""
- memokey = self._tmpl_unique_memokey('aunique', keys, disam, album_id)
+ memokey = self._tmpl_unique_memokey("aunique", keys, disam, album_id)
memoval = self.lib._memotable.get(memokey)
if memoval is not None:
return memoval
@@ -1750,9 +1817,16 @@ def tmpl_aunique(self, keys=None, disam=None, bracket=None):
album = self.lib.get_album(album_id)
return self._tmpl_unique(
- 'aunique', keys, disam, bracket, album_id, album, album.item_keys,
+ "aunique",
+ keys,
+ disam,
+ bracket,
+ album_id,
+ album,
+ album.item_keys,
# Do nothing for singletons.
- lambda a: a is None)
+ lambda a: a is None,
+ )
def tmpl_sunique(self, keys=None, disam=None, bracket=None):
"""Generate a string that is guaranteed to be unique among all
@@ -1767,7 +1841,7 @@ def tmpl_sunique(self, keys=None, disam=None, bracket=None):
"""
# Fast paths: no album, no item or library, or memoized value.
if not self.item or not self.lib:
- return ''
+ return ""
if isinstance(self.item, Item):
item_id = self.item.id
@@ -1775,14 +1849,20 @@ def tmpl_sunique(self, keys=None, disam=None, bracket=None):
raise NotImplementedError("sunique is only implemented for items")
if item_id is None:
- return ''
+ return ""
return self._tmpl_unique(
- 'sunique', keys, disam, bracket, item_id, self.item,
+ "sunique",
+ keys,
+ disam,
+ bracket,
+ item_id,
+ self.item,
Item.all_keys(),
# Do nothing for non singletons.
lambda i: i.album_id is not None,
- initial_subqueries=[dbcore.query.NoneQuery('album_id', True)])
+ initial_subqueries=[dbcore.query.NoneQuery("album_id", True)],
+ )
def _tmpl_unique_memokey(self, name, keys, disam, item_id):
"""Get the memokey for the unique template named "name" for the
@@ -1790,8 +1870,18 @@ def _tmpl_unique_memokey(self, name, keys, disam, item_id):
"""
return (name, keys, disam, item_id)
- def _tmpl_unique(self, name, keys, disam, bracket, item_id, db_item,
- item_keys, skip_item, initial_subqueries=None):
+ def _tmpl_unique(
+ self,
+ name,
+ keys,
+ disam,
+ bracket,
+ item_id,
+ db_item,
+ item_keys,
+ skip_item,
+ initial_subqueries=None,
+ ):
"""Generate a string that is guaranteed to be unique among all items of
the same type as "db_item" who share the same set of keys.
@@ -1818,13 +1908,13 @@ def _tmpl_unique(self, name, keys, disam, bracket, item_id, db_item,
return memoval
if skip_item(db_item):
- self.lib._memotable[memokey] = ''
- return ''
+ self.lib._memotable[memokey] = ""
+ return ""
- keys = keys or beets.config[name]['keys'].as_str()
- disam = disam or beets.config[name]['disambiguators'].as_str()
+ keys = keys or beets.config[name]["keys"].as_str()
+ disam = disam or beets.config[name]["disambiguators"].as_str()
if bracket is None:
- bracket = beets.config[name]['bracket'].as_str()
+ bracket = beets.config[name]["bracket"].as_str()
keys = keys.split()
disam = disam.split()
@@ -1833,33 +1923,35 @@ def _tmpl_unique(self, name, keys, disam, bracket, item_id, db_item,
bracket_l = bracket[0]
bracket_r = bracket[1]
else:
- bracket_l = ''
- bracket_r = ''
+ bracket_l = ""
+ bracket_r = ""
# Find matching items to disambiguate with.
subqueries = []
if initial_subqueries is not None:
subqueries.extend(initial_subqueries)
for key in keys:
- value = db_item.get(key, '')
+ value = db_item.get(key, "")
# Use slow queries for flexible attributes.
fast = key in item_keys
subqueries.append(dbcore.MatchQuery(key, value, fast))
query = dbcore.AndQuery(subqueries)
- ambigous_items = (self.lib.items(query)
- if isinstance(db_item, Item)
- else self.lib.albums(query))
+ ambigous_items = (
+ self.lib.items(query)
+ if isinstance(db_item, Item)
+ else self.lib.albums(query)
+ )
# If there's only one item to matching these details, then do
# nothing.
if len(ambigous_items) == 1:
- self.lib._memotable[memokey] = ''
- return ''
+ self.lib._memotable[memokey] = ""
+ return ""
# Find the first disambiguator that distinguishes the items.
for disambiguator in disam:
# Get the value for each item for the current field.
- disam_values = {s.get(disambiguator, '') for s in ambigous_items}
+ disam_values = {s.get(disambiguator, "") for s in ambigous_items}
# If the set of unique values is equal to the number of
# items in the disambiguation set, we're done -- this is
@@ -1868,7 +1960,7 @@ def _tmpl_unique(self, name, keys, disam, bracket, item_id, db_item,
break
else:
# No disambiguator distinguished all fields.
- res = f' {bracket_l}{item_id}{bracket_r}'
+ res = f" {bracket_l}{item_id}{bracket_r}"
self.lib._memotable[memokey] = res
return res
@@ -1877,15 +1969,15 @@ def _tmpl_unique(self, name, keys, disam, bracket, item_id, db_item,
# Return empty string if disambiguator is empty.
if disam_value:
- res = f' {bracket_l}{disam_value}{bracket_r}'
+ res = f" {bracket_l}{disam_value}{bracket_r}"
else:
- res = ''
+ res = ""
self.lib._memotable[memokey] = res
return res
@staticmethod
- def tmpl_first(s, count=1, skip=0, sep='; ', join_str='; '):
+ def tmpl_first(s, count=1, skip=0, sep="; ", join_str="; "):
"""Get the item(s) from x to y in a string separated by something
and join then with something.
@@ -1900,8 +1992,8 @@ def tmpl_first(s, count=1, skip=0, sep='; ', join_str='; '):
count = skip + int(count)
return join_str.join(s.split(sep)[skip:count])
- def tmpl_ifdef(self, field, trueval='', falseval=''):
- """ If field exists return trueval or the field (default)
+ def tmpl_ifdef(self, field, trueval="", falseval=""):
+ """If field exists return trueval or the field (default)
otherwise, emit return falseval (if provided).
Args:
@@ -1919,6 +2011,8 @@ def tmpl_ifdef(self, field, trueval='', falseval=''):
# Get the name of tmpl_* functions in the above class.
-DefaultTemplateFunctions._func_names = \
- [s for s in dir(DefaultTemplateFunctions)
- if s.startswith(DefaultTemplateFunctions._prefix)]
+DefaultTemplateFunctions._func_names = [
+ s
+ for s in dir(DefaultTemplateFunctions)
+ if s.startswith(DefaultTemplateFunctions._prefix)
+]
diff --git a/beets/logging.py b/beets/logging.py
index 05c22bd1ca..45201667c1 100644
--- a/beets/logging.py
+++ b/beets/logging.py
@@ -21,10 +21,10 @@
"""
-from copy import copy
+import logging
import sys
import threading
-import logging
+from copy import copy
def logsafe(val):
@@ -40,7 +40,7 @@ def logsafe(val):
# (a) only do this for paths, if they can be given a distinct
# type, and (b) warn the developer if they do this for other
# bytestrings.
- return val.decode('utf-8', 'replace')
+ return val.decode("utf-8", "replace")
# Other objects are used as-is so field access, etc., still works in
# the format string. Relies on a working __str__ implementation.
@@ -70,8 +70,16 @@ def __str__(self):
kwargs = {k: logsafe(v) for (k, v) in self.kwargs.items()}
return self.msg.format(*args, **kwargs)
- def _log(self, level, msg, args, exc_info=None, extra=None,
- stack_info=False, **kwargs):
+ def _log(
+ self,
+ level,
+ msg,
+ args,
+ exc_info=None,
+ extra=None,
+ stack_info=False,
+ **kwargs,
+ ):
"""Log msg.format(*args, **kwargs)"""
m = self._LogMessage(msg, args, kwargs)
@@ -84,19 +92,18 @@ def _log(self, level, msg, args, exc_info=None, extra=None,
stacklevel = {}
return super()._log(
- level,
- m,
- (),
- exc_info=exc_info,
- extra=extra,
- stack_info=stack_info,
- **stacklevel,
+ level,
+ m,
+ (),
+ exc_info=exc_info,
+ extra=extra,
+ stack_info=stack_info,
+ **stacklevel,
)
class ThreadLocalLevelLogger(logging.Logger):
- """A version of `Logger` whose level is thread-local instead of shared.
- """
+ """A version of `Logger` whose level is thread-local instead of shared."""
def __init__(self, name, level=logging.NOTSET):
self._thread_level = threading.local()
@@ -134,6 +141,7 @@ class BeetsLogger(ThreadLocalLevelLogger, StrFormatLogger):
# Act like the stdlib logging module by re-exporting its namespace.
from logging import * # noqa
+
# Override the `getLogger` to use our machinery.
def getLogger(name=None): # noqa
if name:
diff --git a/beets/mediafile.py b/beets/mediafile.py
index 46288a71dc..8bde9274cb 100644
--- a/beets/mediafile.py
+++ b/beets/mediafile.py
@@ -13,9 +13,10 @@
# included in all copies or substantial portions of the Software.
+import warnings
+
import mediafile
-import warnings
warnings.warn(
"beets.mediafile is deprecated; use mediafile instead",
# Show the location of the `import mediafile` statement as the warning's
@@ -26,7 +27,7 @@
# Import everything from the mediafile module into this module.
for key, value in mediafile.__dict__.items():
- if key not in ['__name__']:
+ if key not in ["__name__"]:
globals()[key] = value
# Cleanup namespace.
diff --git a/beets/plugins.py b/beets/plugins.py
index 8974cb1175..270da97512 100644
--- a/beets/plugins.py
+++ b/beets/plugins.py
@@ -15,26 +15,25 @@
"""Support for beets plugins."""
-import traceback
-import re
-import inspect
import abc
+import inspect
+import re
+import traceback
from collections import defaultdict
from functools import wraps
+import mediafile
import beets
from beets import logging
-import mediafile
-
-PLUGIN_NAMESPACE = 'beetsplug'
+PLUGIN_NAMESPACE = "beetsplug"
# Plugins using the Last.fm API can share the same API key.
-LASTFM_KEY = '2dc3914abf35f0d9c92d97d8f8e42b43'
+LASTFM_KEY = "2dc3914abf35f0d9c92d97d8f8e42b43"
# Global logger.
-log = logging.getLogger('beets')
+log = logging.getLogger("beets")
class PluginConflictException(Exception):
@@ -51,11 +50,10 @@ class PluginLogFilter(logging.Filter):
"""
def __init__(self, plugin):
- self.prefix = f'{plugin.name}: '
+ self.prefix = f"{plugin.name}: "
def filter(self, record):
- if hasattr(record.msg, 'msg') and isinstance(record.msg.msg,
- str):
+ if hasattr(record.msg, "msg") and isinstance(record.msg.msg, str):
# A _LogMessage from our hacked-up Logging replacement.
record.msg.msg = self.prefix + record.msg.msg
elif isinstance(record.msg, str):
@@ -65,6 +63,7 @@ def filter(self, record):
# Managing the plugins themselves.
+
class BeetsPlugin:
"""The base class for all beets plugins. Plugins provide
functionality by defining a subclass of BeetsPlugin and overriding
@@ -72,9 +71,8 @@ class BeetsPlugin:
"""
def __init__(self, name=None):
- """Perform one-time plugin setup.
- """
- self.name = name or self.__module__.split('.')[-1]
+ """Perform one-time plugin setup."""
+ self.name = name or self.__module__.split(".")[-1]
self.config = beets.config[self.name]
if not self.template_funcs:
self.template_funcs = {}
@@ -97,10 +95,11 @@ def commands(self):
return ()
def _set_stage_log_level(self, stages):
- """Adjust all the stages in `stages` to WARNING logging level.
- """
- return [self._set_log_level_and_params(logging.WARNING, stage)
- for stage in stages]
+ """Adjust all the stages in `stages` to WARNING logging level."""
+ return [
+ self._set_log_level_and_params(logging.WARNING, stage)
+ for stage in stages
+ ]
def get_early_import_stages(self):
"""Return a list of functions that should be called as importer
@@ -134,12 +133,11 @@ def _set_log_level_and_params(self, base_log_level, func):
def wrapper(*args, **kwargs):
assert self._log.level == logging.NOTSET
- verbosity = beets.config['verbose'].get(int)
+ verbosity = beets.config["verbose"].get(int)
log_level = max(logging.DEBUG, base_log_level - 10 * verbosity)
self._log.setLevel(log_level)
if argspec.varkw is None:
- kwargs = {k: v for k, v in kwargs.items()
- if k in argspec.args}
+ kwargs = {k: v for k, v in kwargs.items() if k in argspec.args}
try:
return func(*args, **kwargs)
@@ -149,8 +147,7 @@ def wrapper(*args, **kwargs):
return wrapper
def queries(self):
- """Should return a dict mapping prefixes to Query subclasses.
- """
+ """Should return a dict mapping prefixes to Query subclasses."""
return {}
def track_distance(self, item, info):
@@ -201,6 +198,7 @@ def add_media_field(self, name, descriptor):
"""
# Defer import to prevent circular dependency
from beets import library
+
mediafile.MediaFile.add_field(name, descriptor)
library.Item._media_fields.add(name)
@@ -208,8 +206,7 @@ def add_media_field(self, name, descriptor):
listeners = None
def register_listener(self, event, func):
- """Add a function as a listener for the specified event.
- """
+ """Add a function as a listener for the specified event."""
wrapped_func = self._set_log_level_and_params(logging.WARNING, func)
cls = self.__class__
@@ -230,11 +227,13 @@ def template_func(cls, name):
function will be invoked as ``%name{}`` from path format
strings.
"""
+
def helper(func):
if cls.template_funcs is None:
cls.template_funcs = {}
cls.template_funcs[name] = func
return func
+
return helper
@classmethod
@@ -244,11 +243,13 @@ def template_field(cls, name):
strings. The function must accept a single parameter, the Item
being formatted.
"""
+
def helper(func):
if cls.template_fields is None:
cls.template_fields = {}
cls.template_fields[name] = func
return func
+
return helper
@@ -262,25 +263,29 @@ def load_plugins(names=()):
BeetsPlugin subclasses desired.
"""
for name in names:
- modname = f'{PLUGIN_NAMESPACE}.{name}'
+ modname = f"{PLUGIN_NAMESPACE}.{name}"
try:
try:
namespace = __import__(modname, None, None)
except ImportError as exc:
# Again, this is hacky:
- if exc.args[0].endswith(' ' + name):
- log.warning('** plugin {0} not found', name)
+ if exc.args[0].endswith(" " + name):
+ log.warning("** plugin {0} not found", name)
else:
raise
else:
for obj in getattr(namespace, name).__dict__.values():
- if isinstance(obj, type) and issubclass(obj, BeetsPlugin) \
- and obj != BeetsPlugin and obj not in _classes:
+ if (
+ isinstance(obj, type)
+ and issubclass(obj, BeetsPlugin)
+ and obj != BeetsPlugin
+ and obj not in _classes
+ ):
_classes.add(obj)
except Exception:
log.warning(
- '** error loading plugin {}:\n{}',
+ "** error loading plugin {}:\n{}",
name,
traceback.format_exc(),
)
@@ -311,9 +316,9 @@ def find_plugins():
# Communication with plugins.
+
def commands():
- """Returns a list of Subcommand objects from all loaded plugins.
- """
+ """Returns a list of Subcommand objects from all loaded plugins."""
out = []
for plugin in find_plugins():
out += plugin.commands()
@@ -332,16 +337,16 @@ def queries():
def types(model_cls):
# Gives us `item_types` and `album_types`
- attr_name = f'{model_cls.__name__.lower()}_types'
+ attr_name = f"{model_cls.__name__.lower()}_types"
types = {}
for plugin in find_plugins():
plugin_types = getattr(plugin, attr_name, {})
for field in plugin_types:
if field in types and plugin_types[field] != types[field]:
raise PluginConflictException(
- 'Plugin {} defines flexible field {} '
- 'which has already been defined with '
- 'another type.'.format(plugin.name, field)
+ "Plugin {} defines flexible field {} "
+ "which has already been defined with "
+ "another type.".format(plugin.name, field)
)
types.update(plugin_types)
return types
@@ -349,7 +354,7 @@ def types(model_cls):
def named_queries(model_cls):
# Gather `item_queries` and `album_queries` from the plugins.
- attr_name = f'{model_cls.__name__.lower()}_queries'
+ attr_name = f"{model_cls.__name__.lower()}_queries"
queries = {}
for plugin in find_plugins():
plugin_queries = getattr(plugin, attr_name, {})
@@ -362,6 +367,7 @@ def track_distance(item, info):
Returns a Distance object.
"""
from beets.autotag.hooks import Distance
+
dist = Distance()
for plugin in find_plugins():
dist.update(plugin.track_distance(item, info))
@@ -371,6 +377,7 @@ def track_distance(item, info):
def album_distance(items, album_info, mapping):
"""Returns the album distance calculated by plugins."""
from beets.autotag.hooks import Distance
+
dist = Distance()
for plugin in find_plugins():
dist.update(plugin.album_distance(items, album_info, mapping))
@@ -378,23 +385,21 @@ def album_distance(items, album_info, mapping):
def candidates(items, artist, album, va_likely, extra_tags=None):
- """Gets MusicBrainz candidates for an album from each plugin.
- """
+ """Gets MusicBrainz candidates for an album from each plugin."""
for plugin in find_plugins():
- yield from plugin.candidates(items, artist, album, va_likely,
- extra_tags)
+ yield from plugin.candidates(
+ items, artist, album, va_likely, extra_tags
+ )
def item_candidates(item, artist, title):
- """Gets MusicBrainz candidates for an item from the plugins.
- """
+ """Gets MusicBrainz candidates for an item from the plugins."""
for plugin in find_plugins():
yield from plugin.item_candidates(item, artist, title)
def album_for_id(album_id):
- """Get AlbumInfo objects for a given ID string.
- """
+ """Get AlbumInfo objects for a given ID string."""
for plugin in find_plugins():
album = plugin.album_for_id(album_id)
if album:
@@ -402,8 +407,7 @@ def album_for_id(album_id):
def track_for_id(track_id):
- """Get TrackInfo objects for a given ID string.
- """
+ """Get TrackInfo objects for a given ID string."""
for plugin in find_plugins():
track = plugin.track_for_id(track_id)
if track:
@@ -439,6 +443,7 @@ def import_stages():
# New-style (lazy) plugin-provided fields.
+
def item_field_getters():
"""Get a dictionary mapping field names to unary functions that
compute the field's value.
@@ -451,8 +456,7 @@ def item_field_getters():
def album_field_getters():
- """As above, for album fields.
- """
+ """As above, for album fields."""
funcs = {}
for plugin in find_plugins():
if plugin.album_template_fields:
@@ -462,6 +466,7 @@ def album_field_getters():
# Event dispatch.
+
def event_handlers():
"""Find all event handlers from plugins as a dictionary mapping
event names to sequences of callables.
@@ -482,7 +487,7 @@ def send(event, **arguments):
Return a list of non-None values returned from the handlers.
"""
- log.debug('Sending event: {0}', event)
+ log.debug("Sending event: {0}", event)
results = []
for handler in event_handlers()[event]:
result = handler(**arguments)
@@ -497,11 +502,11 @@ def feat_tokens(for_artist=True):
The `for_artist` option determines whether the regex should be
suitable for matching artist fields (the default) or title fields.
"""
- feat_words = ['ft', 'featuring', 'feat', 'feat.', 'ft.']
+ feat_words = ["ft", "featuring", "feat", "feat.", "ft."]
if for_artist:
- feat_words += ['with', 'vs', 'and', 'con', '&']
- return r'(?<=\s)(?:{})(?=\s)'.format(
- '|'.join(re.escape(x) for x in feat_words)
+ feat_words += ["with", "vs", "and", "con", "&"]
+ return r"(?<=\s)(?:{})(?=\s)".format(
+ "|".join(re.escape(x) for x in feat_words)
)
@@ -517,7 +522,7 @@ def sanitize_choices(choices, choices_all):
if s not in seen:
if s in list(choices_all):
res.append(s)
- elif s == '*':
+ elif s == "*":
res.extend(others)
seen.add(s)
return res
@@ -550,11 +555,11 @@ def sanitize_pairs(pairs, pairs_all):
if x not in seen:
seen.add(x)
res.append(x)
- elif k == '*':
+ elif k == "*":
new = [o for o in others if o not in seen]
seen.update(new)
res.extend(new)
- elif v == '*':
+ elif v == "*":
new = [o for o in others if o not in seen and o[0] == k]
seen.update(new)
res.extend(new)
@@ -568,12 +573,15 @@ def notify_info_yielded(event):
Each yielded value is passed to plugins using the 'info' parameter of
'send'.
"""
+
def decorator(generator):
def decorated(*args, **kwargs):
for v in generator(*args, **kwargs):
send(event, info=v)
yield v
+
return decorated
+
return decorator
@@ -583,7 +591,7 @@ def get_distance(config, data_source, info):
"""
dist = beets.autotag.Distance()
if info.data_source == data_source:
- dist.add('source', config['source_weight'].as_number())
+ dist.add("source", config["source_weight"].as_number())
return dist
@@ -620,7 +628,7 @@ def apply_item_changes(lib, item, move, pretend, write):
class MetadataSourcePlugin(metaclass=abc.ABCMeta):
def __init__(self):
super().__init__()
- self.config.add({'source_weight': 0.5})
+ self.config.add({"source_weight": 0.5})
@abc.abstractproperty
def id_regex(self):
@@ -643,7 +651,7 @@ def track_url(self):
raise NotImplementedError
@abc.abstractmethod
- def _search_api(self, query_type, filters, keywords=''):
+ def _search_api(self, query_type, filters, keywords=""):
raise NotImplementedError
@abc.abstractmethod
@@ -655,7 +663,7 @@ def track_for_id(self, track_id=None, track_data=None):
raise NotImplementedError
@staticmethod
- def get_artist(artists, id_key='id', name_key='name', join_key=None):
+ def get_artist(artists, id_key="id", name_key="name", join_key=None):
"""Returns an artist string (all artists) and an artist_id (the main
artist) for a list of artist object dicts.
@@ -692,15 +700,15 @@ def get_artist(artists, id_key='id', name_key='name', join_key=None):
artist_id = artist[id_key]
name = artist[name_key]
# Strip disambiguation number.
- name = re.sub(r' \(\d+\)$', '', name)
+ name = re.sub(r" \(\d+\)$", "", name)
# Move articles to the front.
- name = re.sub(r'^(.*?), (a|an|the)$', r'\2 \1', name, flags=re.I)
+ name = re.sub(r"^(.*?), (a|an|the)$", r"\2 \1", name, flags=re.I)
# Use a join keyword if requested and available.
if idx < (total - 1): # Skip joining on last.
if join_key and artist.get(join_key, None):
name += f" {artist[join_key]} "
else:
- name += ', '
+ name += ", "
artist_string += name
return artist_string, artist_id
@@ -720,12 +728,10 @@ def _get_id(url_type, id_, id_regex):
:return: Album/track ID.
:rtype: str
"""
- log.debug(
- "Extracting {} ID from '{}'", url_type, id_
- )
- match = re.search(id_regex['pattern'].format(url_type), str(id_))
+ log.debug("Extracting {} ID from '{}'", url_type, id_)
+ match = re.search(id_regex["pattern"].format(url_type), str(id_))
if match:
- id_ = match.group(id_regex['match_group'])
+ id_ = match.group(id_regex["match_group"])
if id_:
return id_
return None
@@ -746,11 +752,11 @@ def candidates(self, items, artist, album, va_likely, extra_tags=None):
:return: Candidate AlbumInfo objects.
:rtype: list[beets.autotag.hooks.AlbumInfo]
"""
- query_filters = {'album': album}
+ query_filters = {"album": album}
if not va_likely:
- query_filters['artist'] = artist
- results = self._search_api(query_type='album', filters=query_filters)
- albums = [self.album_for_id(album_id=r['id']) for r in results]
+ query_filters["artist"] = artist
+ results = self._search_api(query_type="album", filters=query_filters)
+ albums = [self.album_for_id(album_id=r["id"]) for r in results]
return [a for a in albums if a is not None]
def item_candidates(self, item, artist, title):
@@ -767,7 +773,7 @@ def item_candidates(self, item, artist, title):
:rtype: list[beets.autotag.hooks.TrackInfo]
"""
tracks = self._search_api(
- query_type='track', keywords=title, filters={'artist': artist}
+ query_type="track", keywords=title, filters={"artist": artist}
)
return [self.track_for_id(track_data=track) for track in tracks]
diff --git a/beets/random.py b/beets/random.py
index eb4f55aff3..b3276bd3ed 100644
--- a/beets/random.py
+++ b/beets/random.py
@@ -16,20 +16,19 @@
"""
import random
-from operator import attrgetter
from itertools import groupby
+from operator import attrgetter
def _length(obj, album):
- """Get the duration of an item or album.
- """
+ """Get the duration of an item or album."""
if album:
return sum(i.length for i in obj.items())
else:
return obj.length
-def _equal_chance_permutation(objs, field='albumartist', random_gen=None):
+def _equal_chance_permutation(objs, field="albumartist", random_gen=None):
"""Generate (lazily) a permutation of the objects where every group
with equal values for `field` have an equal chance of appearing in
any given position.
@@ -86,8 +85,9 @@ def _take_time(iter, secs, album):
return out
-def random_objs(objs, album, number=1, time=None, equal_chance=False,
- random_gen=None):
+def random_objs(
+ objs, album, number=1, time=None, equal_chance=False, random_gen=None
+):
"""Get a random subset of the provided `objs`.
If `number` is provided, produce that many matches. Otherwise, if
diff --git a/beets/ui/__init__.py b/beets/ui/__init__.py
index 815565d5ad..ae68e6413d 100644
--- a/beets/ui/__init__.py
+++ b/beets/ui/__init__.py
@@ -18,31 +18,28 @@
"""
-import optparse
-import textwrap
-import sys
-from difflib import SequenceMatcher
-import sqlite3
import errno
+import optparse
+import os.path
import re
+import sqlite3
import struct
+import sys
+import textwrap
import traceback
-import os.path
+from difflib import SequenceMatcher
-from beets import logging
-from beets import library
-from beets import plugins
-from beets import util
-from beets.util.functemplate import template
-from beets import config
-from beets.util import as_string
+import confuse
+
+from beets import config, library, logging, plugins, util
from beets.autotag import mb
-from beets.dbcore import query as db_query
from beets.dbcore import db
-import confuse
+from beets.dbcore import query as db_query
+from beets.util import as_string
+from beets.util.functemplate import template
# On Windows platforms, use colorama to support "ANSI" terminal colors.
-if sys.platform == 'win32':
+if sys.platform == "win32":
try:
import colorama
except ImportError:
@@ -51,15 +48,15 @@
colorama.init()
-log = logging.getLogger('beets')
+log = logging.getLogger("beets")
if not log.handlers:
log.addHandler(logging.StreamHandler())
log.propagate = False # Don't propagate to root handler.
PF_KEY_QUERIES = {
- 'comp': 'comp:true',
- 'singleton': 'singleton:true',
+ "comp": "comp:true",
+ "singleton": "singleton:true",
}
@@ -73,31 +70,29 @@ class UserError(Exception):
def _in_encoding():
- """Get the encoding to use for *inputting* strings from the console.
- """
+ """Get the encoding to use for *inputting* strings from the console."""
return _stream_encoding(sys.stdin)
def _out_encoding():
- """Get the encoding to use for *outputting* strings to the console.
- """
+ """Get the encoding to use for *outputting* strings to the console."""
return _stream_encoding(sys.stdout)
-def _stream_encoding(stream, default='utf-8'):
+def _stream_encoding(stream, default="utf-8"):
"""A helper for `_in_encoding` and `_out_encoding`: get the stream's
preferred encoding, using a configured override or a default
fallback if neither is not specified.
"""
# Configured override?
- encoding = config['terminal_encoding'].get()
+ encoding = config["terminal_encoding"].get()
if encoding:
return encoding
# For testing: When sys.stdout or sys.stdin is a StringIO under the
# test harness, it doesn't have an `encoding` attribute. Just use
# UTF-8.
- if not hasattr(stream, 'encoding'):
+ if not hasattr(stream, "encoding"):
return default
# Python's guessed output stream encoding, or UTF-8 as a fallback
@@ -124,19 +119,19 @@ def print_(*strings, **kwargs):
(it defaults to a newline).
"""
if not strings:
- strings = ['']
+ strings = [""]
assert isinstance(strings[0], str)
- txt = ' '.join(strings)
- txt += kwargs.get('end', '\n')
+ txt = " ".join(strings)
+ txt += kwargs.get("end", "\n")
# Encode the string and write it to stdout.
# On Python 3, sys.stdout expects text strings and uses the
# exception-throwing encoding error policy. To avoid throwing
# errors and use our configurable encoding override, we use the
# underlying bytes buffer instead.
- if hasattr(sys.stdout, 'buffer'):
- out = txt.encode(_out_encoding(), 'replace')
+ if hasattr(sys.stdout, "buffer"):
+ out = txt.encode(_out_encoding(), "replace")
sys.stdout.buffer.write(out)
sys.stdout.buffer.flush()
else:
@@ -147,9 +142,9 @@ def print_(*strings, **kwargs):
# Configuration wrappers.
+
def _bool_fallback(a, b):
- """Given a boolean or None, return the original value or a fallback.
- """
+ """Given a boolean or None, return the original value or a fallback."""
if a is None:
assert isinstance(b, bool)
return b
@@ -162,7 +157,7 @@ def should_write(write_opt=None):
"""Decide whether a command that updates metadata should also write
tags, using the importer configuration as the default.
"""
- return _bool_fallback(write_opt, config['import']['write'].get(bool))
+ return _bool_fallback(write_opt, config["import"]["write"].get(bool))
def should_move(move_opt=None):
@@ -177,8 +172,8 @@ def should_move(move_opt=None):
"""
return _bool_fallback(
move_opt,
- config['import']['move'].get(bool) or
- config['import']['copy'].get(bool)
+ config["import"]["move"].get(bool)
+ or config["import"]["copy"].get(bool),
)
@@ -186,8 +181,7 @@ def should_move(move_opt=None):
def indent(count):
- """Returns a string with `count` many spaces.
- """
+ """Returns a string with `count` many spaces."""
return " " * count
@@ -201,18 +195,25 @@ def input_(prompt=None):
# use print_() explicitly to display prompts.
# https://bugs.python.org/issue1927
if prompt:
- print_(prompt, end=' ')
+ print_(prompt, end=" ")
try:
resp = input()
except EOFError:
- raise UserError('stdin stream ended while input required')
+ raise UserError("stdin stream ended while input required")
return resp
-def input_options(options, require=False, prompt=None, fallback_prompt=None,
- numrange=None, default=None, max_width=72):
+def input_options(
+ options,
+ require=False,
+ prompt=None,
+ fallback_prompt=None,
+ numrange=None,
+ default=None,
+ max_width=72,
+):
"""Prompts a user for input. The sequence of `options` defines the
choices the user has. A single-letter shortcut is inferred for each
option; the user's choice is returned as that single, lower-case
@@ -252,33 +253,37 @@ def input_options(options, require=False, prompt=None, fallback_prompt=None,
found_letter = letter
break
else:
- raise ValueError('no unambiguous lettering found')
+ raise ValueError("no unambiguous lettering found")
letters[found_letter.lower()] = option
index = option.index(found_letter)
# Mark the option's shortcut letter for display.
if not require and (
- (default is None and not numrange and first) or
- (isinstance(default, str) and
- found_letter.lower() == default.lower())):
+ (default is None and not numrange and first)
+ or (
+ isinstance(default, str)
+ and found_letter.lower() == default.lower()
+ )
+ ):
# The first option is the default; mark it.
- show_letter = '[%s]' % found_letter.upper()
+ show_letter = "[%s]" % found_letter.upper()
is_default = True
else:
show_letter = found_letter.upper()
is_default = False
# Colorize the letter shortcut.
- show_letter = colorize('action_default' if is_default else 'action',
- show_letter)
+ show_letter = colorize(
+ "action_default" if is_default else "action", show_letter
+ )
# Insert the highlighted letter back into the word.
descr_color = "action_default" if is_default else "action_description"
capitalized.append(
colorize(descr_color, option[:index])
+ show_letter
- + colorize(descr_color, option[index + 1:])
+ + colorize(descr_color, option[index + 1 :])
)
display_letters.append(found_letter.upper())
@@ -300,12 +305,12 @@ def input_options(options, require=False, prompt=None, fallback_prompt=None,
if numrange:
if isinstance(default, int):
default_name = str(default)
- default_name = colorize('action_default', default_name)
- tmpl = '# selection (default %s)'
+ default_name = colorize("action_default", default_name)
+ tmpl = "# selection (default %s)"
prompt_parts.append(tmpl % default_name)
prompt_part_lengths.append(len(tmpl % str(default)))
else:
- prompt_parts.append('# selection')
+ prompt_parts.append("# selection")
prompt_part_lengths.append(len(prompt_parts[-1]))
prompt_parts += capitalized
prompt_part_lengths += [len(s) for s in options]
@@ -314,8 +319,9 @@ def input_options(options, require=False, prompt=None, fallback_prompt=None,
# Start prompt with U+279C: Heavy Round-Tipped Rightwards Arrow
prompt = colorize("action", "\u279C ")
line_length = 0
- for i, (part, length) in enumerate(zip(prompt_parts,
- prompt_part_lengths)):
+ for i, (part, length) in enumerate(
+ zip(prompt_parts, prompt_part_lengths)
+ ):
# Add punctuation.
if i == len(prompt_parts) - 1:
part += colorize("action_description", "?")
@@ -325,12 +331,12 @@ def input_options(options, require=False, prompt=None, fallback_prompt=None,
# Choose either the current line or the beginning of the next.
if line_length + length + 1 > max_width:
- prompt += '\n'
+ prompt += "\n"
line_length = 0
if line_length != 0:
# Not the beginning of the line; need a space.
- part = ' ' + part
+ part = " " + part
length += 1
prompt += part
@@ -339,10 +345,10 @@ def input_options(options, require=False, prompt=None, fallback_prompt=None,
# Make a fallback prompt too. This is displayed if the user enters
# something that is not recognized.
if not fallback_prompt:
- fallback_prompt = 'Enter one of '
+ fallback_prompt = "Enter one of "
if numrange:
- fallback_prompt += '%i-%i, ' % numrange
- fallback_prompt += ', '.join(display_letters) + ':'
+ fallback_prompt += "%i-%i, " % numrange
+ fallback_prompt += ", ".join(display_letters) + ":"
resp = input_(prompt)
while True:
@@ -398,24 +404,26 @@ def input_select_objects(prompt, objs, rep, prompt_all=None):
objects individually.
"""
choice = input_options(
- ('y', 'n', 's'), False,
- '%s? (Yes/no/select)' % (prompt_all or prompt))
+ ("y", "n", "s"), False, "%s? (Yes/no/select)" % (prompt_all or prompt)
+ )
print() # Blank line.
- if choice == 'y': # Yes.
+ if choice == "y": # Yes.
return objs
- elif choice == 's': # Select.
+ elif choice == "s": # Select.
out = []
for obj in objs:
rep(obj)
answer = input_options(
- ('y', 'n', 'q'), True, '%s? (yes/no/quit)' % prompt,
- 'Enter Y or N:'
+ ("y", "n", "q"),
+ True,
+ "%s? (yes/no/quit)" % prompt,
+ "Enter Y or N:",
)
- if answer == 'y':
+ if answer == "y":
out.append(obj)
- elif answer == 'q':
+ elif answer == "q":
return out
return out
@@ -425,15 +433,16 @@ def input_select_objects(prompt, objs, rep, prompt_all=None):
# Human output formatting.
+
def human_bytes(size):
"""Formats size, a number of bytes, in a human-readable way."""
- powers = ['', 'K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y', 'H']
- unit = 'B'
+ powers = ["", "K", "M", "G", "T", "P", "E", "Z", "Y", "H"]
+ unit = "B"
for power in powers:
if size < 1024:
return f"{size:3.1f} {power}{unit}"
size /= 1024.0
- unit = 'iB'
+ unit = "iB"
return "big"
@@ -442,13 +451,13 @@ def human_seconds(interval):
interval using English words.
"""
units = [
- (1, 'second'),
- (60, 'minute'),
- (60, 'hour'),
- (24, 'day'),
- (7, 'week'),
- (52, 'year'),
- (10, 'decade'),
+ (1, "second"),
+ (60, "minute"),
+ (60, "hour"),
+ (24, "day"),
+ (7, "week"),
+ (52, "year"),
+ (10, "decade"),
]
for i in range(len(units) - 1):
increment, suffix = units[i]
@@ -469,7 +478,7 @@ def human_seconds_short(interval):
string.
"""
interval = int(interval)
- return '%i:%02i' % (interval // 60, interval % 60)
+ return "%i:%02i" % (interval // 60, interval % 60)
# Colorization.
@@ -700,7 +709,7 @@ def _colordiff(a, b):
else:
return (
colorize("text_diff_removed", str(a)),
- colorize("text_diff_added", str(b))
+ colorize("text_diff_added", str(b)),
)
a_out = []
@@ -708,19 +717,17 @@ def _colordiff(a, b):
matcher = SequenceMatcher(lambda x: False, a, b)
for op, a_start, a_end, b_start, b_end in matcher.get_opcodes():
- if op == 'equal':
+ if op == "equal":
# In both strings.
a_out.append(a[a_start:a_end])
b_out.append(b[b_start:b_end])
- elif op == 'insert':
+ elif op == "insert":
# Right only.
- b_out.append(colorize("text_diff_added",
- b[b_start:b_end]))
- elif op == 'delete':
+ b_out.append(colorize("text_diff_added", b[b_start:b_end]))
+ elif op == "delete":
# Left only.
- a_out.append(colorize("text_diff_removed",
- a[a_start:a_end]))
- elif op == 'replace':
+ a_out.append(colorize("text_diff_removed", a[a_start:a_end]))
+ elif op == "replace":
# Right and left differ. Colorise with second highlight if
# it's just a case change.
if a[a_start:a_end].lower() != b[b_start:b_end].lower():
@@ -728,21 +735,19 @@ def _colordiff(a, b):
b_color = "text_diff_added"
else:
a_color = b_color = "text_highlight_minor"
- a_out.append(colorize(a_color,
- a[a_start:a_end]))
- b_out.append(colorize(b_color,
- b[b_start:b_end]))
+ a_out.append(colorize(a_color, a[a_start:a_end]))
+ b_out.append(colorize(b_color, b[b_start:b_end]))
else:
assert False
- return ''.join(a_out), ''.join(b_out)
+ return "".join(a_out), "".join(b_out)
def colordiff(a, b):
"""Colorize differences between two values if color is enabled.
(Like _colordiff but conditional.)
"""
- if config['ui']['color']:
+ if config["ui"]["color"]:
return _colordiff(a, b)
else:
return str(a), str(b)
@@ -753,7 +758,7 @@ def get_path_formats(subview=None):
pairs.
"""
path_formats = []
- subview = subview or config['paths']
+ subview = subview or config["paths"]
for query, view in subview.items():
query = PF_KEY_QUERIES.get(query, query) # Expand common queries.
path_formats.append((query, template(view.as_str())))
@@ -761,25 +766,22 @@ def get_path_formats(subview=None):
def get_replacements():
- """Confuse validation function that reads regex/string pairs.
- """
+ """Confuse validation function that reads regex/string pairs."""
replacements = []
- for pattern, repl in config['replace'].get(dict).items():
- repl = repl or ''
+ for pattern, repl in config["replace"].get(dict).items():
+ repl = repl or ""
try:
replacements.append((re.compile(pattern), repl))
except re.error:
raise UserError(
- 'malformed regular expression in replace: {}'.format(
- pattern
- )
+ "malformed regular expression in replace: {}".format(pattern)
)
return replacements
def term_width():
"""Get the width (columns) of the terminal."""
- fallback = config['ui']['terminal_width'].get(int)
+ fallback = config["ui"]["terminal_width"].get(int)
# The fcntl and termios modules are not available on non-Unix
# platforms, so we fall back to a constant.
@@ -790,11 +792,11 @@ def term_width():
return fallback
try:
- buf = fcntl.ioctl(0, termios.TIOCGWINSZ, ' ' * 4)
+ buf = fcntl.ioctl(0, termios.TIOCGWINSZ, " " * 4)
except OSError:
return fallback
try:
- height, width = struct.unpack('hh', buf)
+ height, width = struct.unpack("hh", buf)
except struct.error:
return fallback
return width
@@ -811,11 +813,13 @@ def split_into_lines(string, width_tuple):
"""
first_width, middle_width, last_width = width_tuple
words = []
- esc_text = re.compile(r"""(?P[^\x1b]*)
+ esc_text = re.compile(
+ r"""(?P[^\x1b]*)
(?P(?:\x1b\[[;\d]*[A-Za-z])+)
(?P[^\x1b]+)(?P\x1b\[39;49;00m)
(?P[^\x1b]*)""",
- re.VERBOSE)
+ re.VERBOSE,
+ )
if uncolorize(string) == string:
# No colors in string
words = string.split()
@@ -849,22 +853,29 @@ def split_into_lines(string, width_tuple):
if space_before_text:
# Colorize each word with pre/post escapes
# Reconstruct colored words
- words += [m.group("esc") + raw_word
- + RESET_COLOR for raw_word in raw_words]
+ words += [
+ m.group("esc") + raw_word + RESET_COLOR
+ for raw_word in raw_words
+ ]
else:
# Pretext stops mid-word
if m.group("esc") != RESET_COLOR:
# Add the rest of the current word, with a reset after it
words[-1] += m.group("esc") + raw_words[0] + RESET_COLOR
# Add the subsequent colored words:
- words += [m.group("esc") + raw_word
- + RESET_COLOR for raw_word in raw_words[1:]]
+ words += [
+ m.group("esc") + raw_word + RESET_COLOR
+ for raw_word in raw_words[1:]
+ ]
else:
# Caught a mid-word escape sequence
words[-1] += raw_words[0]
words += raw_words[1:]
- if (m.group("text")[-1] != " " and m.group("posttext") != ""
- and m.group("posttext")[0] != " "):
+ if (
+ m.group("text")[-1] != " "
+ and m.group("posttext") != ""
+ and m.group("posttext")[0] != " "
+ ):
# reset falls mid-word
post_text = m.group("posttext").split()
words[-1] += post_text[0]
@@ -883,12 +894,8 @@ def split_into_lines(string, width_tuple):
# (optimistically) add the next word to check the fit
pot_substr = " ".join([next_substr, words[i]])
# Find out if the pot(ential)_substr fits into the next substring.
- fits_first = (
- len(result) == 0 and color_len(pot_substr) <= first_width
- )
- fits_middle = (
- len(result) != 0 and color_len(pot_substr) <= middle_width
- )
+ fits_first = len(result) == 0 and color_len(pot_substr) <= first_width
+ fits_middle = len(result) != 0 and color_len(pot_substr) <= middle_width
if fits_first or fits_middle:
# Fitted(!) let's try and add another word before appending
next_substr = pot_substr
@@ -917,8 +924,7 @@ def split_into_lines(string, width_tuple):
result.append(this_line)
next_substr = next_line
else:
- this_line, next_line = color_split(pot_substr,
- middle_width)
+ this_line, next_line = color_split(pot_substr, middle_width)
result.append(this_line)
next_substr = next_line
previous_fit = color_len(next_substr) <= middle_width
@@ -930,7 +936,7 @@ def split_into_lines(string, width_tuple):
# `middle_width`. Append an empty substring as the new last substring if
# the last substring is too long.
if not color_len(next_substr) <= last_width:
- result.append('')
+ result.append("")
return result
@@ -949,7 +955,7 @@ def print_column_layout(
With subsequent lines (i.e. {lhs1}, {rhs1} onwards) being the
rest of contents, wrapped if the width would be otherwise exceeded.
"""
- if right["prefix"] + right["contents"] + right["suffix"] == '':
+ if right["prefix"] + right["contents"] + right["suffix"] == "":
# No right hand information, so we don't need a separator.
separator = ""
first_line_no_wrap = (
@@ -969,13 +975,16 @@ def print_column_layout(
# Wrap into columns
if "width" not in left or "width" not in right:
# If widths have not been defined, set to share space.
- left["width"] = (max_width - len(indent_str)
- - color_len(separator)) // 2
- right["width"] = (max_width - len(indent_str)
- - color_len(separator)) // 2
+ left["width"] = (
+ max_width - len(indent_str) - color_len(separator)
+ ) // 2
+ right["width"] = (
+ max_width - len(indent_str) - color_len(separator)
+ ) // 2
# On the first line, account for suffix as well as prefix
left_width_tuple = (
- left["width"] - color_len(left["prefix"])
+ left["width"]
+ - color_len(left["prefix"])
- color_len(left["suffix"]),
left["width"] - color_len(left["prefix"]),
left["width"] - color_len(left["prefix"]),
@@ -983,7 +992,8 @@ def print_column_layout(
left_split = split_into_lines(left["contents"], left_width_tuple)
right_width_tuple = (
- right["width"] - color_len(right["prefix"])
+ right["width"]
+ - color_len(right["prefix"])
- color_len(right["suffix"]),
right["width"] - color_len(right["prefix"]),
right["width"] - color_len(right["prefix"]),
@@ -1046,8 +1056,9 @@ def print_column_layout(
right_part_len = 0
# Padding until end of column
- padding = right["width"] - color_len(right["prefix"]) \
- - right_part_len
+ padding = (
+ right["width"] - color_len(right["prefix"]) - right_part_len
+ )
# Remove some padding on the first line to display
# length
if i == 0:
@@ -1081,7 +1092,7 @@ def print_newline_layout(
If {lhs0} would go over the maximum width, the subsequent lines are
indented a second time for ease of reading.
"""
- if right["prefix"] + right["contents"] + right["suffix"] == '':
+ if right["prefix"] + right["contents"] + right["suffix"] == "":
# No right hand information, so we don't need a separator.
separator = ""
first_line_no_wrap = (
@@ -1141,25 +1152,28 @@ def _field_diff(field, old, old_fmt, new, new_fmt):
newval = new.get(field)
# If no change, abort.
- if isinstance(oldval, float) and isinstance(newval, float) and \
- abs(oldval - newval) < FLOAT_EPSILON:
+ if (
+ isinstance(oldval, float)
+ and isinstance(newval, float)
+ and abs(oldval - newval) < FLOAT_EPSILON
+ ):
return None
elif oldval == newval:
return None
# Get formatted values for output.
- oldstr = old_fmt.get(field, '')
- newstr = new_fmt.get(field, '')
+ oldstr = old_fmt.get(field, "")
+ newstr = new_fmt.get(field, "")
# For strings, highlight changes. For others, colorize the whole
# thing.
if isinstance(oldval, str):
oldstr, newstr = colordiff(oldval, newstr)
else:
- oldstr = colorize('text_error', oldstr)
- newstr = colorize('text_error', newstr)
+ oldstr = colorize("text_error", oldstr)
+ newstr = colorize("text_error", newstr)
- return f'{oldstr} -> {newstr}'
+ return f"{oldstr} -> {newstr}"
def show_model_changes(new, old=None, fields=None, always=False):
@@ -1183,29 +1197,28 @@ def show_model_changes(new, old=None, fields=None, always=False):
changes = []
for field in old:
# Subset of the fields. Never show mtime.
- if field == 'mtime' or (fields and field not in fields):
+ if field == "mtime" or (fields and field not in fields):
continue
# Detect and show difference for this field.
line = _field_diff(field, old, old_fmt, new, new_fmt)
if line:
- changes.append(f' {field}: {line}')
+ changes.append(f" {field}: {line}")
# New fields.
for field in set(new) - set(old):
if fields and field not in fields:
continue
- changes.append(' {}: {}'.format(
- field,
- colorize('text_highlight', new_fmt[field])
- ))
+ changes.append(
+ " {}: {}".format(field, colorize("text_highlight", new_fmt[field]))
+ )
# Print changes.
if changes or always:
print_(format(old))
if changes:
- print_('\n'.join(changes))
+ print_("\n".join(changes))
return bool(changes)
@@ -1232,31 +1245,34 @@ def show_path_changes(path_changes):
destinations = list(map(util.displayable_path, destinations))
# Calculate widths for terminal split
- col_width = (term_width() - len(' -> ')) // 2
+ col_width = (term_width() - len(" -> ")) // 2
max_width = len(max(sources + destinations, key=len))
if max_width > col_width:
# Print every change over two lines
for source, dest in zip(sources, destinations):
color_source, color_dest = colordiff(source, dest)
- print_('{0} \n -> {1}'.format(color_source, color_dest))
+ print_("{0} \n -> {1}".format(color_source, color_dest))
else:
# Print every change on a single line, and add a header
- title_pad = max_width - len('Source ') + len(' -> ')
+ title_pad = max_width - len("Source ") + len(" -> ")
- print_('Source {0} Destination'.format(' ' * title_pad))
+ print_("Source {0} Destination".format(" " * title_pad))
for source, dest in zip(sources, destinations):
pad = max_width - len(source)
color_source, color_dest = colordiff(source, dest)
- print_('{0} {1} -> {2}'.format(
- color_source,
- ' ' * pad,
- color_dest,
- ))
+ print_(
+ "{0} {1} -> {2}".format(
+ color_source,
+ " " * pad,
+ color_dest,
+ )
+ )
# Helper functions for option parsing.
+
def _store_dict(option, opt_str, value, parser):
"""Custom action callback to parse options which have ``key=value``
pairs as values. All such pairs passed for this option are
@@ -1272,13 +1288,15 @@ def _store_dict(option, opt_str, value, parser):
option_values = getattr(parser.values, dest)
try:
- key, value = value.split('=', 1)
+ key, value = value.split("=", 1)
if not (key and value):
raise ValueError
except ValueError:
raise UserError(
- "supplied argument `{}' is not of the form `key=value'"
- .format(value))
+ "supplied argument `{}' is not of the form `key=value'".format(
+ value
+ )
+ )
option_values[key] = value
@@ -1306,20 +1324,29 @@ def __init__(self, *args, **kwargs):
# us to check whether it has been specified on the CLI - bypassing the
# fact that arguments may be in any order
- def add_album_option(self, flags=('-a', '--album')):
+ def add_album_option(self, flags=("-a", "--album")):
"""Add a -a/--album option to match albums instead of tracks.
If used then the format option can auto-detect whether we're setting
the format for items or albums.
Sets the album property on the options extracted from the CLI.
"""
- album = optparse.Option(*flags, action='store_true',
- help='match albums instead of tracks')
+ album = optparse.Option(
+ *flags, action="store_true", help="match albums instead of tracks"
+ )
self.add_option(album)
self._album_flags = set(flags)
- def _set_format(self, option, opt_str, value, parser, target=None,
- fmt=None, store_true=False):
+ def _set_format(
+ self,
+ option,
+ opt_str,
+ value,
+ parser,
+ target=None,
+ fmt=None,
+ store_true=False,
+ ):
"""Internal callback that sets the correct format while parsing CLI
arguments.
"""
@@ -1330,9 +1357,9 @@ def _set_format(self, option, opt_str, value, parser, target=None,
if fmt:
value = fmt
elif value:
- value, = decargs([value])
+ (value,) = decargs([value])
else:
- value = ''
+ value = ""
parser.values.format = value
if target:
@@ -1352,7 +1379,7 @@ def _set_format(self, option, opt_str, value, parser, target=None,
config[library.Item._format_config_key].set(value)
config[library.Album._format_config_key].set(value)
- def add_path_option(self, flags=('-p', '--path')):
+ def add_path_option(self, flags=("-p", "--path")):
"""Add a -p/--path option to display the path instead of the default
format.
@@ -1362,14 +1389,17 @@ def add_path_option(self, flags=('-p', '--path')):
Sets the format property to '$path' on the options extracted from the
CLI.
"""
- path = optparse.Option(*flags, nargs=0, action='callback',
- callback=self._set_format,
- callback_kwargs={'fmt': '$path',
- 'store_true': True},
- help='print paths for matched items or albums')
+ path = optparse.Option(
+ *flags,
+ nargs=0,
+ action="callback",
+ callback=self._set_format,
+ callback_kwargs={"fmt": "$path", "store_true": True},
+ help="print paths for matched items or albums",
+ )
self.add_option(path)
- def add_format_option(self, flags=('-f', '--format'), target=None):
+ def add_format_option(self, flags=("-f", "--format"), target=None):
"""Add -f/--format option to print some LibModel instances with a
custom format.
@@ -1387,19 +1417,20 @@ def add_format_option(self, flags=('-f', '--format'), target=None):
kwargs = {}
if target:
if isinstance(target, str):
- target = {'item': library.Item,
- 'album': library.Album}[target]
- kwargs['target'] = target
-
- opt = optparse.Option(*flags, action='callback',
- callback=self._set_format,
- callback_kwargs=kwargs,
- help='print with custom format')
+ target = {"item": library.Item, "album": library.Album}[target]
+ kwargs["target"] = target
+
+ opt = optparse.Option(
+ *flags,
+ action="callback",
+ callback=self._set_format,
+ callback_kwargs=kwargs,
+ help="print with custom format",
+ )
self.add_option(opt)
def add_all_common_options(self):
- """Add album, path and format options.
- """
+ """Add album, path and format options."""
self.add_album_option()
self.add_path_option()
self.add_format_option()
@@ -1413,12 +1444,13 @@ def add_all_common_options(self):
# There you will also find a better description of the code and a more
# succinct example program.
+
class Subcommand:
"""A subcommand of a root command-line application that may be
invoked by a SubcommandOptionParser.
"""
- def __init__(self, name, parser=None, help='', aliases=(), hide=False):
+ def __init__(self, name, parser=None, help="", aliases=(), hide=False):
"""Creates a new subcommand. name is the primary way to invoke
the subcommand; aliases are alternate names. parser is an
OptionParser responsible for parsing the subcommand's options.
@@ -1445,8 +1477,9 @@ def root_parser(self):
@root_parser.setter
def root_parser(self, root_parser):
self._root_parser = root_parser
- self.parser.prog = '{} {}'.format(
- as_string(root_parser.get_prog_name()), self.name)
+ self.parser.prog = "{} {}".format(
+ as_string(root_parser.get_prog_name()), self.name
+ )
class SubcommandsOptionParser(CommonOptionsParser):
@@ -1460,11 +1493,13 @@ def __init__(self, *args, **kwargs):
to subcommands, a sequence of Subcommand objects.
"""
# A more helpful default usage.
- if 'usage' not in kwargs:
- kwargs['usage'] = """
+ if "usage" not in kwargs:
+ kwargs[
+ "usage"
+ ] = """
%prog COMMAND [ARGS...]
%prog help COMMAND"""
- kwargs['add_help_option'] = False
+ kwargs["add_help_option"] = False
# Super constructor.
super().__init__(*args, **kwargs)
@@ -1475,8 +1510,7 @@ def __init__(self, *args, **kwargs):
self.subcommands = []
def add_subcommand(self, *cmds):
- """Adds a Subcommand object to the parser's list of commands.
- """
+ """Adds a Subcommand object to the parser's list of commands."""
for cmd in cmds:
cmd.root_parser = self
self.subcommands.append(cmd)
@@ -1490,7 +1524,7 @@ def format_help(self, formatter=None):
# Subcommands header.
result = ["\n"]
- result.append(formatter.format_heading('Commands'))
+ result.append(formatter.format_heading("Commands"))
formatter.indent()
# Generate the display names (including aliases).
@@ -1502,7 +1536,7 @@ def format_help(self, formatter=None):
for subcommand in subcommands:
name = subcommand.name
if subcommand.aliases:
- name += ' (%s)' % ', '.join(subcommand.aliases)
+ name += " (%s)" % ", ".join(subcommand.aliases)
disp_names.append(name)
# Set the help position based on the max width.
@@ -1518,16 +1552,24 @@ def format_help(self, formatter=None):
name = "%*s%s\n" % (formatter.current_indent, "", name)
indent_first = help_position
else:
- name = "%*s%-*s " % (formatter.current_indent, "",
- name_width, name)
+ name = "%*s%-*s " % (
+ formatter.current_indent,
+ "",
+ name_width,
+ name,
+ )
indent_first = 0
result.append(name)
help_width = formatter.width - help_position
help_lines = textwrap.wrap(subcommand.help, help_width)
- help_line = help_lines[0] if help_lines else ''
+ help_line = help_lines[0] if help_lines else ""
result.append("%*s%s\n" % (indent_first, "", help_line))
- result.extend(["%*s%s\n" % (help_position, "", line)
- for line in help_lines[1:]])
+ result.extend(
+ [
+ "%*s%s\n" % (help_position, "", line)
+ for line in help_lines[1:]
+ ]
+ )
formatter.dedent()
# Concatenate the original help message with the subcommand
@@ -1540,8 +1582,7 @@ def _subcommand_for_name(self, name):
an alias. If no subcommand matches, returns None.
"""
for subcommand in self.subcommands:
- if name == subcommand.name or \
- name in subcommand.aliases:
+ if name == subcommand.name or name in subcommand.aliases:
return subcommand
return None
@@ -1553,9 +1594,9 @@ def parse_global_options(self, args):
# Force the help command
if options.help:
- subargs = ['help']
+ subargs = ["help"]
elif options.version:
- subargs = ['version']
+ subargs = ["version"]
return options, subargs
def parse_subcommand(self, args):
@@ -1565,7 +1606,7 @@ def parse_subcommand(self, args):
"""
# Help is default command
if not args:
- args = ['help']
+ args = ["help"]
cmdname = args.pop(0)
subcommand = self._subcommand_for_name(cmdname)
@@ -1576,23 +1617,24 @@ def parse_subcommand(self, args):
return subcommand, suboptions, subargs
-optparse.Option.ALWAYS_TYPED_ACTIONS += ('callback',)
+optparse.Option.ALWAYS_TYPED_ACTIONS += ("callback",)
# The main entry point and bootstrapping.
+
def _load_plugins(options, config):
- """Load the plugins specified on the command line or in the configuration.
- """
- paths = config['pluginpath'].as_str_seq(split=False)
+ """Load the plugins specified on the command line or in the configuration."""
+ paths = config["pluginpath"].as_str_seq(split=False)
paths = [util.normpath(p) for p in paths]
- log.debug('plugin paths: {0}', util.displayable_path(paths))
+ log.debug("plugin paths: {0}", util.displayable_path(paths))
# On Python 3, the search paths need to be unicode.
paths = [util.py3_path(p) for p in paths]
# Extend the `beetsplug` package to include the plugin paths.
import beetsplug
+
beetsplug.__path__ = paths + list(beetsplug.__path__)
# For backwards compatibility, also support plugin paths that
@@ -1601,15 +1643,17 @@ def _load_plugins(options, config):
# If we were given any plugins on the command line, use those.
if options.plugins is not None:
- plugin_list = (options.plugins.split(',')
- if len(options.plugins) > 0 else [])
+ plugin_list = (
+ options.plugins.split(",") if len(options.plugins) > 0 else []
+ )
else:
- plugin_list = config['plugins'].as_str_seq()
+ plugin_list = config["plugins"].as_str_seq()
# Exclude any plugins that were specified on the command line
if options.exclude is not None:
- plugin_list = [p for p in plugin_list
- if p not in options.exclude.split(',')]
+ plugin_list = [
+ p for p in plugin_list if p not in options.exclude.split(",")
+ ]
plugins.load_plugins(plugin_list)
return plugins
@@ -1654,12 +1698,11 @@ def _setup(options, lib=None):
def _configure(options):
- """Amend the global configuration object with command line options.
- """
+ """Amend the global configuration object with command line options."""
# Add any additional config files specified with --config. This
# special handling lets specified plugins get loaded before we
# finish parsing the command line.
- if getattr(options, 'config', None) is not None:
+ if getattr(options, "config", None) is not None:
overlay_path = options.config
del options.config
config.set_file(overlay_path)
@@ -1668,62 +1711,67 @@ def _configure(options):
config.set_args(options)
# Configure the logger.
- if config['verbose'].get(int):
+ if config["verbose"].get(int):
log.set_global_level(logging.DEBUG)
else:
log.set_global_level(logging.INFO)
if overlay_path:
- log.debug('overlaying configuration: {0}',
- util.displayable_path(overlay_path))
+ log.debug(
+ "overlaying configuration: {0}", util.displayable_path(overlay_path)
+ )
config_path = config.user_config_path()
if os.path.isfile(config_path):
- log.debug('user configuration: {0}',
- util.displayable_path(config_path))
+ log.debug("user configuration: {0}", util.displayable_path(config_path))
else:
- log.debug('no user configuration found at {0}',
- util.displayable_path(config_path))
+ log.debug(
+ "no user configuration found at {0}",
+ util.displayable_path(config_path),
+ )
- log.debug('data directory: {0}',
- util.displayable_path(config.config_dir()))
+ log.debug("data directory: {0}", util.displayable_path(config.config_dir()))
return config
def _ensure_db_directory_exists(path):
- if path == b':memory:': # in memory db
+ if path == b":memory:": # in memory db
return
newpath = os.path.dirname(path)
if not os.path.isdir(newpath):
- if input_yn("The database directory {} does not \
- exist. Create it (Y/n)?"
- .format(util.displayable_path(newpath))):
+ if input_yn(
+ "The database directory {} does not \
+ exist. Create it (Y/n)?".format(
+ util.displayable_path(newpath)
+ )
+ ):
os.makedirs(newpath)
def _open_library(config):
- """Create a new library instance from the configuration.
- """
- dbpath = util.bytestring_path(config['library'].as_filename())
+ """Create a new library instance from the configuration."""
+ dbpath = util.bytestring_path(config["library"].as_filename())
_ensure_db_directory_exists(dbpath)
try:
lib = library.Library(
dbpath,
- config['directory'].as_filename(),
+ config["directory"].as_filename(),
get_path_formats(),
get_replacements(),
)
lib.get_item(0) # Test database connection.
except (sqlite3.OperationalError, sqlite3.DatabaseError) as db_error:
- log.debug('{}', traceback.format_exc())
- raise UserError("database file {} cannot not be opened: {}".format(
- util.displayable_path(dbpath),
- db_error
- ))
- log.debug('library database: {0}\n'
- 'library directory: {1}',
- util.displayable_path(lib.path),
- util.displayable_path(lib.directory))
+ log.debug("{}", traceback.format_exc())
+ raise UserError(
+ "database file {} cannot not be opened: {}".format(
+ util.displayable_path(dbpath), db_error
+ )
+ )
+ log.debug(
+ "library database: {0}\n" "library directory: {1}",
+ util.displayable_path(lib.path),
+ util.displayable_path(lib.directory),
+ )
return lib
@@ -1732,33 +1780,65 @@ def _raw_main(args, lib=None):
handling.
"""
parser = SubcommandsOptionParser()
- parser.add_format_option(flags=('--format-item',), target=library.Item)
- parser.add_format_option(flags=('--format-album',), target=library.Album)
- parser.add_option('-l', '--library', dest='library',
- help='library database file to use')
- parser.add_option('-d', '--directory', dest='directory',
- help="destination music directory")
- parser.add_option('-v', '--verbose', dest='verbose', action='count',
- help='log more details (use twice for even more)')
- parser.add_option('-c', '--config', dest='config',
- help='path to configuration file')
- parser.add_option('-p', '--plugins', dest='plugins',
- help='a comma-separated list of plugins to load')
- parser.add_option('-P', '--disable-plugins', dest='exclude',
- help='a comma-separated list of plugins to disable')
- parser.add_option('-h', '--help', dest='help', action='store_true',
- help='show this help message and exit')
- parser.add_option('--version', dest='version', action='store_true',
- help=optparse.SUPPRESS_HELP)
+ parser.add_format_option(flags=("--format-item",), target=library.Item)
+ parser.add_format_option(flags=("--format-album",), target=library.Album)
+ parser.add_option(
+ "-l", "--library", dest="library", help="library database file to use"
+ )
+ parser.add_option(
+ "-d",
+ "--directory",
+ dest="directory",
+ help="destination music directory",
+ )
+ parser.add_option(
+ "-v",
+ "--verbose",
+ dest="verbose",
+ action="count",
+ help="log more details (use twice for even more)",
+ )
+ parser.add_option(
+ "-c", "--config", dest="config", help="path to configuration file"
+ )
+ parser.add_option(
+ "-p",
+ "--plugins",
+ dest="plugins",
+ help="a comma-separated list of plugins to load",
+ )
+ parser.add_option(
+ "-P",
+ "--disable-plugins",
+ dest="exclude",
+ help="a comma-separated list of plugins to disable",
+ )
+ parser.add_option(
+ "-h",
+ "--help",
+ dest="help",
+ action="store_true",
+ help="show this help message and exit",
+ )
+ parser.add_option(
+ "--version",
+ dest="version",
+ action="store_true",
+ help=optparse.SUPPRESS_HELP,
+ )
options, subargs = parser.parse_global_options(args)
# Special case for the `config --edit` command: bypass _setup so
# that an invalid configuration does not prevent the editor from
# starting.
- if subargs and subargs[0] == 'config' \
- and ('-e' in subargs or '--edit' in subargs):
+ if (
+ subargs
+ and subargs[0] == "config"
+ and ("-e" in subargs or "--edit" in subargs)
+ ):
from beets.ui.commands import config_edit
+
return config_edit()
test_lib = bool(lib)
@@ -1768,7 +1848,7 @@ def _raw_main(args, lib=None):
subcommand, suboptions, subargs = parser.parse_subcommand(subargs)
subcommand.func(lib, suboptions, subargs)
- plugins.send('cli_exit', lib=lib)
+ plugins.send("cli_exit", lib=lib)
if not test_lib:
# Clean up the library unless it came from the test harness.
lib._close()
@@ -1782,7 +1862,7 @@ def main(args=None):
_raw_main(args)
except UserError as exc:
message = exc.args[0] if exc.args else None
- log.error('error: {0}', message)
+ log.error("error: {0}", message)
sys.exit(1)
except util.HumanReadableException as exc:
exc.log(log)
@@ -1790,14 +1870,14 @@ def main(args=None):
except library.FileOperationError as exc:
# These errors have reasonable human-readable descriptions, but
# we still want to log their tracebacks for debugging.
- log.debug('{}', traceback.format_exc())
- log.error('{}', exc)
+ log.debug("{}", traceback.format_exc())
+ log.error("{}", exc)
sys.exit(1)
except confuse.ConfigError as exc:
- log.error('configuration error: {0}', exc)
+ log.error("configuration error: {0}", exc)
sys.exit(1)
except db_query.InvalidQueryError as exc:
- log.error('invalid query: {0}', exc)
+ log.error("invalid query: {0}", exc)
sys.exit(1)
except OSError as exc:
if exc.errno == errno.EPIPE:
@@ -1807,11 +1887,11 @@ def main(args=None):
raise
except KeyboardInterrupt:
# Silently ignore ^C except in verbose mode.
- log.debug('{}', traceback.format_exc())
+ log.debug("{}", traceback.format_exc())
except db.DBAccessError as exc:
log.error(
- 'database access error: {0}\n'
- 'the library file might have a permissions problem',
- exc
+ "database access error: {0}\n"
+ "the library file might have a permissions problem",
+ exc,
)
sys.exit(1)
diff --git a/beets/ui/commands.py b/beets/ui/commands.py
index 7d39c7b9b1..ad4f7821db 100755
--- a/beets/ui/commands.py
+++ b/beets/ui/commands.py
@@ -19,34 +19,38 @@
import os
import re
-from platform import python_version
-from collections import namedtuple, Counter
+from collections import Counter, namedtuple
from itertools import chain
+from platform import python_version
from typing import Sequence
import beets
-from beets import ui
-from beets.ui import print_, input_, decargs, show_path_changes, \
- print_newline_layout, print_column_layout
-from beets import autotag
-from beets.autotag import Recommendation
-from beets.autotag import hooks
-from beets import plugins
-from beets import importer
-from beets import util
-from beets.util import syspath, normpath, ancestry, displayable_path, \
- MoveOperation, functemplate
-from beets import library
-from beets import config
-from beets import logging
+from beets import autotag, config, importer, library, logging, plugins, ui, util
+from beets.autotag import Recommendation, hooks
+from beets.ui import (
+ decargs,
+ input_,
+ print_,
+ print_column_layout,
+ print_newline_layout,
+ show_path_changes,
+)
+from beets.util import (
+ MoveOperation,
+ ancestry,
+ displayable_path,
+ functemplate,
+ normpath,
+ syspath,
+)
from . import _store_dict
-VARIOUS_ARTISTS = 'Various Artists'
-PromptChoice = namedtuple('PromptChoice', ['short', 'long', 'callback'])
+VARIOUS_ARTISTS = "Various Artists"
+PromptChoice = namedtuple("PromptChoice", ["short", "long", "callback"])
# Global logger.
-log = logging.getLogger('beets')
+log = logging.getLogger("beets")
# The list of default subcommands. This is populated with Subcommand
# objects that can be fed to a SubcommandsOptionParser.
@@ -55,6 +59,7 @@
# Utilities.
+
def _do_query(lib, query, album, also_items=True):
"""For commands that operate on matched items, performs a query
and returns a list of matching items and a list of matching
@@ -74,9 +79,9 @@ def _do_query(lib, query, album, also_items=True):
items = list(lib.items(query))
if album and not albums:
- raise ui.UserError('No matching albums found.')
+ raise ui.UserError("No matching albums found.")
elif not album and not items:
- raise ui.UserError('No matching items found.')
+ raise ui.UserError("No matching items found.")
return items, albums
@@ -107,31 +112,34 @@ def _parse_logfiles(logfiles):
try:
yield from _paths_from_logfile(syspath(normpath(logfile)))
except ValueError as err:
- raise ui.UserError('malformed logfile {}: {}'.format(
- util.displayable_path(logfile),
- str(err)
- )) from err
+ raise ui.UserError(
+ "malformed logfile {}: {}".format(
+ util.displayable_path(logfile), str(err)
+ )
+ ) from err
except IOError as err:
- raise ui.UserError('unreadable logfile {}: {}'.format(
- util.displayable_path(logfile),
- str(err)
- )) from err
+ raise ui.UserError(
+ "unreadable logfile {}: {}".format(
+ util.displayable_path(logfile), str(err)
+ )
+ ) from err
# fields: Shows a list of available fields for queries and format strings.
+
def _print_keys(query):
"""Given a SQLite query result, print the `key` field of each
returned row, with indentation of 2 spaces.
"""
for row in query:
- print_(' ' * 2 + row['key'])
+ print_(" " * 2 + row["key"])
def fields_func(lib, opts, args):
def _print_rows(names):
names.sort()
- print_(' ' + '\n '.join(names))
+ print_(" " + "\n ".join(names))
print_("Item fields:")
_print_rows(library.Item.all_keys())
@@ -141,7 +149,7 @@ def _print_rows(names):
with lib.transaction() as tx:
# The SQL uses the DISTINCT to get unique values from the query
- unique_fields = 'SELECT DISTINCT key FROM (%s)'
+ unique_fields = "SELECT DISTINCT key FROM (%s)"
print_("Item flexible attributes:")
_print_keys(tx.query(unique_fields % library.Item._flex_table))
@@ -151,8 +159,7 @@ def _print_rows(names):
fields_cmd = ui.Subcommand(
- 'fields',
- help='show fields available for queries and format strings'
+ "fields", help="show fields available for queries and format strings"
)
fields_cmd.func = fields_func
default_commands.append(fields_cmd)
@@ -160,11 +167,13 @@ def _print_rows(names):
# help: Print help text for commands
+
class HelpCommand(ui.Subcommand):
def __init__(self):
super().__init__(
- 'help', aliases=('?',),
- help='give detailed help on a specific sub-command',
+ "help",
+ aliases=("?",),
+ help="give detailed help on a specific sub-command",
)
def func(self, lib, opts, args):
@@ -185,6 +194,7 @@ def func(self, lib, opts, args):
# Importer utilities and support.
+
def disambig_string(info):
"""Generate a string for an AlbumInfo or TrackInfo object that
provides context that helps disambiguate similar-looking albums and
@@ -195,20 +205,23 @@ def disambig_string(info):
elif isinstance(info, hooks.TrackInfo):
disambig = get_singleton_disambig_fields(info)
else:
- return ''
+ return ""
- return ', '.join(disambig)
+ return ", ".join(disambig)
def get_singleton_disambig_fields(info: hooks.TrackInfo) -> Sequence[str]:
out = []
- chosen_fields = config['match']['singleton_disambig_fields'].as_str_seq()
+ chosen_fields = config["match"]["singleton_disambig_fields"].as_str_seq()
calculated_values = {
- 'index': "Index {}".format(str(info.index)),
- 'track_alt': "Track {}".format(info.track_alt),
- 'album': "[{}]".format(info.album) if
- (config['import']['singleton_album_disambig'].get() and
- info.get('album')) else '',
+ "index": "Index {}".format(str(info.index)),
+ "track_alt": "Track {}".format(info.track_alt),
+ "album": "[{}]".format(info.album)
+ if (
+ config["import"]["singleton_album_disambig"].get()
+ and info.get("album")
+ )
+ else "",
}
for field in chosen_fields:
@@ -225,10 +238,11 @@ def get_singleton_disambig_fields(info: hooks.TrackInfo) -> Sequence[str]:
def get_album_disambig_fields(info: hooks.AlbumInfo) -> Sequence[str]:
out = []
- chosen_fields = config['match']['album_disambig_fields'].as_str_seq()
+ chosen_fields = config["match"]["album_disambig_fields"].as_str_seq()
calculated_values = {
- 'media': '{}x{}'.format(info.mediums, info.media) if
- (info.mediums and info.mediums > 1) else info.media,
+ "media": "{}x{}".format(info.mediums, info.media)
+ if (info.mediums and info.mediums > 1)
+ else info.media,
}
for field in chosen_fields:
@@ -270,13 +284,13 @@ def penalty_string(distance, limit=None):
"""
penalties = []
for key in distance.keys():
- key = key.replace('album_', '')
- key = key.replace('track_', '')
- key = key.replace('_', ' ')
+ key = key.replace("album_", "")
+ key = key.replace("track_", "")
+ key = key.replace("_", " ")
penalties.append(key)
if penalties:
if limit and len(penalties) > limit:
- penalties = penalties[:limit] + ['...']
+ penalties = penalties[:limit] + ["..."]
# Prefix penalty string with U+2260: Not Equal To
penalty_string = "\u2260 {}".format(", ".join(penalties))
return ui.colorize("changed", penalty_string)
@@ -322,8 +336,9 @@ def __init__(self):
}
)
- def print_layout(self, indent, left, right, separator=" -> ",
- max_width=None):
+ def print_layout(
+ self, indent, left, right, separator=" -> ", max_width=None
+ ):
if not max_width:
# If no max_width provided, use terminal width
max_width = ui.term_width()
@@ -341,20 +356,23 @@ def show_match_header(self):
print_("")
# 'Match' line and similarity.
- print_(self.indent_header +
- f"Match ({dist_string(self.match.distance)}):")
+ print_(
+ self.indent_header + f"Match ({dist_string(self.match.distance)}):"
+ )
if self.match.info.get("album"):
# Matching an album - print that
- artist_album_str = f"{self.match.info.artist}" + \
- f" - {self.match.info.album}"
+ artist_album_str = (
+ f"{self.match.info.artist}" + f" - {self.match.info.album}"
+ )
else:
# Matching a single track
- artist_album_str = f"{self.match.info.artist}" + \
- f" - {self.match.info.title}"
+ artist_album_str = (
+ f"{self.match.info.artist}" + f" - {self.match.info.title}"
+ )
print_(
- self.indent_header +
- dist_colorize(artist_album_str, self.match.distance)
+ self.indent_header
+ + dist_colorize(artist_album_str, self.match.distance)
)
# Penalties.
@@ -455,12 +473,11 @@ def format_index(self, track_info):
index = medium_index = track_info.track
medium = track_info.disc
mediums = track_info.disctotal
- if config['per_disc_numbering']:
+ if config["per_disc_numbering"]:
if mediums and mediums > 1:
- return f'{medium}-{medium_index}'
+ return f"{medium}-{medium_index}"
else:
- return str(medium_index if medium_index is not None
- else index)
+ return str(medium_index if medium_index is not None else index)
else:
return str(index)
@@ -532,14 +549,17 @@ def make_line(self, item, track_info):
appropriately. Returns (lhs, rhs) for column printing.
"""
# Track titles.
- lhs_title, rhs_title, diff_title = \
- self.make_track_titles(item, track_info)
+ lhs_title, rhs_title, diff_title = self.make_track_titles(
+ item, track_info
+ )
# Track number change.
- lhs_track, rhs_track, diff_track = \
- self.make_track_numbers(item, track_info)
+ lhs_track, rhs_track, diff_track = self.make_track_numbers(
+ item, track_info
+ )
# Length change.
- lhs_length, rhs_length, diff_length = \
- self.make_track_lengths(item, track_info)
+ lhs_length, rhs_length, diff_length = self.make_track_lengths(
+ item, track_info
+ )
changed = diff_title or diff_track or diff_length
@@ -580,9 +600,9 @@ def get_width(side):
try:
return len(
ui.uncolorize(
- " ".join([side["prefix"],
- side["contents"],
- side["suffix"]])
+ " ".join(
+ [side["prefix"], side["contents"], side["suffix"]]
+ )
)
)
except KeyError:
@@ -641,8 +661,7 @@ def show_match_tracks(self):
# match is an AlbumMatch named tuple, mapping is a dict
# Sort the pairs by the track_info index (at index 1 of the namedtuple)
pairs = list(self.match.mapping.items())
- pairs.sort(
- key=lambda item_and_track_info: item_and_track_info[1].index)
+ pairs.sort(key=lambda item_and_track_info: item_and_track_info[1].index)
# Build up LHS and RHS for track difference display. The `lines` list
# contains `(left, right)` tuples.
lines = []
@@ -650,8 +669,7 @@ def show_match_tracks(self):
for item, track_info in pairs:
# If the track is the first on a new medium, show medium
# number and title.
- if medium != track_info.medium or \
- disctitle != track_info.disctitle:
+ if medium != track_info.medium or disctitle != track_info.disctitle:
# Create header for new medium
header = self.make_medium_info_line(track_info)
if header != "":
@@ -706,8 +724,9 @@ def show_change(cur_artist, cur_album, match):
album's tags are changed according to `match`, which must be an AlbumMatch
object.
"""
- change = AlbumChange(cur_artist=cur_artist, cur_album=cur_album,
- match=match)
+ change = AlbumChange(
+ cur_artist=cur_artist, cur_album=cur_album, match=match
+ )
# Print the match header.
change.show_match_header()
@@ -723,8 +742,9 @@ def show_item_change(item, match):
"""Print out the change that would occur by tagging `item` with the
metadata from `match`, a TrackMatch object.
"""
- change = TrackChange(cur_artist=item.artist, cur_title=item.title,
- match=match)
+ change = TrackChange(
+ cur_artist=item.artist, cur_title=item.title, match=match
+ )
# Print the match header.
change.show_match_header()
# Print the match details.
@@ -753,23 +773,24 @@ def summarize_items(items, singleton):
# Enumerate all the formats by decreasing frequencies:
for fmt, count in sorted(
format_counts.items(),
- key=lambda fmt_and_count: (-fmt_and_count[1], fmt_and_count[0])
+ key=lambda fmt_and_count: (-fmt_and_count[1], fmt_and_count[0]),
):
- summary_parts.append(f'{fmt} {count}')
+ summary_parts.append(f"{fmt} {count}")
if items:
average_bitrate = sum([item.bitrate for item in items]) / len(items)
total_duration = sum([item.length for item in items])
total_filesize = sum([item.filesize for item in items])
- summary_parts.append('{}kbps'.format(int(average_bitrate / 1000)))
+ summary_parts.append("{}kbps".format(int(average_bitrate / 1000)))
if items[0].format == "FLAC":
- sample_bits = '{}kHz/{} bit'.format(
- round(int(items[0].samplerate) / 1000, 1), items[0].bitdepth)
+ sample_bits = "{}kHz/{} bit".format(
+ round(int(items[0].samplerate) / 1000, 1), items[0].bitdepth
+ )
summary_parts.append(sample_bits)
summary_parts.append(ui.human_seconds_short(total_duration))
summary_parts.append(ui.human_bytes(total_filesize))
- return ', '.join(summary_parts)
+ return ", ".join(summary_parts)
def _summary_judgment(rec):
@@ -780,35 +801,46 @@ def _summary_judgment(rec):
summary judgment is made.
"""
- if config['import']['quiet']:
+ if config["import"]["quiet"]:
if rec == Recommendation.strong:
return importer.action.APPLY
else:
- action = config['import']['quiet_fallback'].as_choice({
- 'skip': importer.action.SKIP,
- 'asis': importer.action.ASIS,
- })
- elif config['import']['timid']:
+ action = config["import"]["quiet_fallback"].as_choice(
+ {
+ "skip": importer.action.SKIP,
+ "asis": importer.action.ASIS,
+ }
+ )
+ elif config["import"]["timid"]:
return None
elif rec == Recommendation.none:
- action = config['import']['none_rec_action'].as_choice({
- 'skip': importer.action.SKIP,
- 'asis': importer.action.ASIS,
- 'ask': None,
- })
+ action = config["import"]["none_rec_action"].as_choice(
+ {
+ "skip": importer.action.SKIP,
+ "asis": importer.action.ASIS,
+ "ask": None,
+ }
+ )
else:
return None
if action == importer.action.SKIP:
- print_('Skipping.')
+ print_("Skipping.")
elif action == importer.action.ASIS:
- print_('Importing as-is.')
+ print_("Importing as-is.")
return action
-def choose_candidate(candidates, singleton, rec, cur_artist=None,
- cur_album=None, item=None, itemcount=None,
- choices=[]):
+def choose_candidate(
+ candidates,
+ singleton,
+ rec,
+ cur_artist=None,
+ cur_album=None,
+ item=None,
+ itemcount=None,
+ choices=[],
+):
"""Given a sorted list of candidates, ask the user for a selection
of which candidate to use. Applies to both full albums and
singletons (tracks). Candidates are either AlbumMatch or TrackMatch
@@ -839,10 +871,11 @@ def choose_candidate(candidates, singleton, rec, cur_artist=None,
if singleton:
print_("No matching recordings found.")
else:
- print_("No matching release found for {} tracks."
- .format(itemcount))
- print_('For help, see: '
- 'https://beets.readthedocs.org/en/latest/faq.html#nomatch')
+ print_("No matching release found for {} tracks.".format(itemcount))
+ print_(
+ "For help, see: "
+ "https://beets.readthedocs.org/en/latest/faq.html#nomatch"
+ )
sel = ui.input_options(choice_opts)
if sel in choice_actions:
return choice_actions[sel]
@@ -862,13 +895,15 @@ def choose_candidate(candidates, singleton, rec, cur_artist=None,
if not bypass_candidates:
# Display list of candidates.
print_("")
- print_('Finding tags for {} "{} - {}".'.format(
- 'track' if singleton else 'album',
- item.artist if singleton else cur_artist,
- item.title if singleton else cur_album,
- ))
+ print_(
+ 'Finding tags for {} "{} - {}".'.format(
+ "track" if singleton else "album",
+ item.artist if singleton else cur_artist,
+ item.title if singleton else cur_album,
+ )
+ )
- print_(ui.indent(2) + 'Candidates:')
+ print_(ui.indent(2) + "Candidates:")
for i, match in enumerate(candidates):
# Index, metadata, and distance.
index0 = "{0}.".format(i + 1)
@@ -897,9 +932,8 @@ def choose_candidate(candidates, singleton, rec, cur_artist=None,
print_(ui.indent(13) + disambig)
# Ask the user for a choice.
- sel = ui.input_options(choice_opts,
- numrange=(1, len(candidates)))
- if sel == 'm':
+ sel = ui.input_options(choice_opts, numrange=(1, len(candidates)))
+ if sel == "m":
pass
elif sel in choice_actions:
return choice_actions[sel]
@@ -918,24 +952,29 @@ def choose_candidate(candidates, singleton, rec, cur_artist=None,
show_change(cur_artist, cur_album, match)
# Exact match => tag automatically if we're not in timid mode.
- if rec == Recommendation.strong and not config['import']['timid']:
+ if rec == Recommendation.strong and not config["import"]["timid"]:
return match
# Ask for confirmation.
- default = config['import']['default_action'].as_choice({
- 'apply': 'a',
- 'skip': 's',
- 'asis': 'u',
- 'none': None,
- })
+ default = config["import"]["default_action"].as_choice(
+ {
+ "apply": "a",
+ "skip": "s",
+ "asis": "u",
+ "none": None,
+ }
+ )
if default is None:
require = True
# Bell ring when user interaction is needed.
- if config['import']['bell']:
- ui.print_('\a', end='')
- sel = ui.input_options(('Apply', 'More candidates') + choice_opts,
- require=require, default=default)
- if sel == 'a':
+ if config["import"]["bell"]:
+ ui.print_("\a", end="")
+ sel = ui.input_options(
+ ("Apply", "More candidates") + choice_opts,
+ require=require,
+ default=default,
+ )
+ if sel == "a":
return match
elif sel in choice_actions:
return choice_actions[sel]
@@ -947,13 +986,11 @@ def manual_search(session, task):
Input either an artist and album (for full albums) or artist and
track name (for singletons) for manual search.
"""
- artist = input_('Artist:').strip()
- name = input_('Album:' if task.is_album else 'Track:').strip()
+ artist = input_("Artist:").strip()
+ name = input_("Album:" if task.is_album else "Track:").strip()
if task.is_album:
- _, _, prop = autotag.tag_album(
- task.items, artist, name
- )
+ _, _, prop = autotag.tag_album(task.items, artist, name)
return prop
else:
return autotag.tag_item(task.item, artist, name)
@@ -964,28 +1001,23 @@ def manual_id(session, task):
Input an ID, either for an album ("release") or a track ("recording").
"""
- prompt = 'Enter {} ID:'.format('release' if task.is_album
- else 'recording')
+ prompt = "Enter {} ID:".format("release" if task.is_album else "recording")
search_id = input_(prompt).strip()
if task.is_album:
- _, _, prop = autotag.tag_album(
- task.items, search_ids=search_id.split()
- )
+ _, _, prop = autotag.tag_album(task.items, search_ids=search_id.split())
return prop
else:
return autotag.tag_item(task.item, search_ids=search_id.split())
def abort_action(session, task):
- """A prompt choice callback that aborts the importer.
- """
+ """A prompt choice callback that aborts the importer."""
raise importer.ImportAbort()
class TerminalImportSession(importer.ImportSession):
- """An import session that runs in a terminal.
- """
+ """An import session that runs in a terminal."""
def choose_match(self, task):
"""Given an initial autotagging of items, go through an interactive
@@ -995,24 +1027,26 @@ def choose_match(self, task):
# Show what we're tagging.
print_()
- path_str0 = displayable_path(task.paths, '\n')
- path_str = ui.colorize('import_path', path_str0)
- items_str0 = '({} items)'.format(len(task.items))
- items_str = ui.colorize('import_path_items', items_str0)
- print_(' '.join([path_str, items_str]))
+ path_str0 = displayable_path(task.paths, "\n")
+ path_str = ui.colorize("import_path", path_str0)
+ items_str0 = "({} items)".format(len(task.items))
+ items_str = ui.colorize("import_path_items", items_str0)
+ print_(" ".join([path_str, items_str]))
# Let plugins display info or prompt the user before we go through the
# process of selecting candidate.
- results = plugins.send('import_task_before_choice',
- session=self, task=task)
+ results = plugins.send(
+ "import_task_before_choice", session=self, task=task
+ )
actions = [action for action in results if action]
if len(actions) == 1:
return actions[0]
elif len(actions) > 1:
raise plugins.PluginConflictException(
- 'Only one handler for `import_task_before_choice` may return '
- 'an action.')
+ "Only one handler for `import_task_before_choice` may return "
+ "an action."
+ )
# Take immediate action if appropriate.
action = _summary_judgment(task.rec)
@@ -1031,8 +1065,13 @@ def choose_match(self, task):
# `PromptChoice`.
choices = self._get_choices(task)
choice = choose_candidate(
- task.candidates, False, task.rec, task.cur_artist,
- task.cur_album, itemcount=len(task.items), choices=choices
+ task.candidates,
+ False,
+ task.rec,
+ task.cur_artist,
+ task.cur_album,
+ itemcount=len(task.items),
+ choices=choices,
)
# Basic choices that require no more action here.
@@ -1078,8 +1117,9 @@ def choose_item(self, task):
while True:
# Ask for a choice.
choices = self._get_choices(task)
- choice = choose_candidate(candidates, True, rec, item=task.item,
- choices=choices)
+ choice = choose_candidate(
+ candidates, True, rec, item=task.item, choices=choices
+ )
if choice in (importer.action.SKIP, importer.action.ASIS):
return choice
@@ -1101,58 +1141,69 @@ def resolve_duplicate(self, task, found_duplicates):
"""Decide what to do when a new album or item seems similar to one
that's already in the library.
"""
- log.warning("This {0} is already in the library!",
- ("album" if task.is_album else "item"))
+ log.warning(
+ "This {0} is already in the library!",
+ ("album" if task.is_album else "item"),
+ )
- if config['import']['quiet']:
+ if config["import"]["quiet"]:
# In quiet mode, don't prompt -- just skip.
- log.info('Skipping.')
- sel = 's'
+ log.info("Skipping.")
+ sel = "s"
else:
# Print some detail about the existing and new items so the
# user can make an informed decision.
for duplicate in found_duplicates:
- print_("Old: " + summarize_items(
- list(duplicate.items()) if task.is_album else [duplicate],
- not task.is_album,
- ))
- if config['import']['duplicate_verbose_prompt']:
+ print_(
+ "Old: "
+ + summarize_items(
+ list(duplicate.items())
+ if task.is_album
+ else [duplicate],
+ not task.is_album,
+ )
+ )
+ if config["import"]["duplicate_verbose_prompt"]:
if task.is_album:
for dup in duplicate.items():
print(f" {dup}")
else:
print(f" {duplicate}")
- print_("New: " + summarize_items(
- task.imported_items(),
- not task.is_album,
- ))
- if config['import']['duplicate_verbose_prompt']:
+ print_(
+ "New: "
+ + summarize_items(
+ task.imported_items(),
+ not task.is_album,
+ )
+ )
+ if config["import"]["duplicate_verbose_prompt"]:
for item in task.imported_items():
print(f" {item}")
sel = ui.input_options(
- ('Skip new', 'Keep all', 'Remove old', 'Merge all')
+ ("Skip new", "Keep all", "Remove old", "Merge all")
)
- if sel == 's':
+ if sel == "s":
# Skip new.
task.set_choice(importer.action.SKIP)
- elif sel == 'k':
+ elif sel == "k":
# Keep both. Do nothing; leave the choice intact.
pass
- elif sel == 'r':
+ elif sel == "r":
# Remove old.
task.should_remove_duplicates = True
- elif sel == 'm':
+ elif sel == "m":
task.should_merge_duplicates = True
else:
assert False
def should_resume(self, path):
- return ui.input_yn("Import of the directory:\n{}\n"
- "was interrupted. Resume (Y/n)?"
- .format(displayable_path(path)))
+ return ui.input_yn(
+ "Import of the directory:\n{}\n"
+ "was interrupted. Resume (Y/n)?".format(displayable_path(path))
+ )
def _get_choices(self, task):
"""Get the list of prompt choices that should be presented to the
@@ -1172,47 +1223,61 @@ def _get_choices(self, task):
"""
# Standard, built-in choices.
choices = [
- PromptChoice('s', 'Skip',
- lambda s, t: importer.action.SKIP),
- PromptChoice('u', 'Use as-is',
- lambda s, t: importer.action.ASIS)
+ PromptChoice("s", "Skip", lambda s, t: importer.action.SKIP),
+ PromptChoice("u", "Use as-is", lambda s, t: importer.action.ASIS),
]
if task.is_album:
choices += [
- PromptChoice('t', 'as Tracks',
- lambda s, t: importer.action.TRACKS),
- PromptChoice('g', 'Group albums',
- lambda s, t: importer.action.ALBUMS),
+ PromptChoice(
+ "t", "as Tracks", lambda s, t: importer.action.TRACKS
+ ),
+ PromptChoice(
+ "g", "Group albums", lambda s, t: importer.action.ALBUMS
+ ),
]
choices += [
- PromptChoice('e', 'Enter search', manual_search),
- PromptChoice('i', 'enter Id', manual_id),
- PromptChoice('b', 'aBort', abort_action),
+ PromptChoice("e", "Enter search", manual_search),
+ PromptChoice("i", "enter Id", manual_id),
+ PromptChoice("b", "aBort", abort_action),
]
# Send the before_choose_candidate event and flatten list.
- extra_choices = list(chain(*plugins.send('before_choose_candidate',
- session=self, task=task)))
+ extra_choices = list(
+ chain(
+ *plugins.send(
+ "before_choose_candidate", session=self, task=task
+ )
+ )
+ )
# Add a "dummy" choice for the other baked-in option, for
# duplicate checking.
- all_choices = [
- PromptChoice('a', 'Apply', None),
- ] + choices + extra_choices
+ all_choices = (
+ [
+ PromptChoice("a", "Apply", None),
+ ]
+ + choices
+ + extra_choices
+ )
# Check for conflicts.
short_letters = [c.short for c in all_choices]
if len(short_letters) != len(set(short_letters)):
# Duplicate short letter has been found.
- duplicates = [i for i, count in Counter(short_letters).items()
- if count > 1]
+ duplicates = [
+ i for i, count in Counter(short_letters).items() if count > 1
+ ]
for short in duplicates:
# Keep the first of the choices, removing the rest.
dup_choices = [c for c in all_choices if c.short == short]
for c in dup_choices[1:]:
- log.warning("Prompt choice '{0}' removed due to conflict "
- "with '{1}' (short letter: '{2}')",
- c.long, dup_choices[0].long, c.short)
+ log.warning(
+ "Prompt choice '{0}' removed due to conflict "
+ "with '{1}' (short letter: '{2}')",
+ c.long,
+ dup_choices[0].long,
+ c.short,
+ )
extra_choices.remove(c)
return choices + extra_choices
@@ -1226,39 +1291,40 @@ def import_files(lib, paths, query):
query.
"""
# Check parameter consistency.
- if config['import']['quiet'] and config['import']['timid']:
+ if config["import"]["quiet"] and config["import"]["timid"]:
raise ui.UserError("can't be both quiet and timid")
# Open the log.
- if config['import']['log'].get() is not None:
- logpath = syspath(config['import']['log'].as_filename())
+ if config["import"]["log"].get() is not None:
+ logpath = syspath(config["import"]["log"].as_filename())
try:
- loghandler = logging.FileHandler(logpath, encoding='utf-8')
+ loghandler = logging.FileHandler(logpath, encoding="utf-8")
except OSError:
- raise ui.UserError("could not open log file for writing: "
- "{}".format(displayable_path(logpath)))
+ raise ui.UserError(
+ "could not open log file for writing: "
+ "{}".format(displayable_path(logpath))
+ )
else:
loghandler = None
# Never ask for input in quiet mode.
- if config['import']['resume'].get() == 'ask' and \
- config['import']['quiet']:
- config['import']['resume'] = False
+ if config["import"]["resume"].get() == "ask" and config["import"]["quiet"]:
+ config["import"]["resume"] = False
session = TerminalImportSession(lib, loghandler, paths, query)
session.run()
# Emit event.
- plugins.send('import', lib=lib, paths=paths)
+ plugins.send("import", lib=lib, paths=paths)
def import_func(lib, opts, args):
- config['import'].set_args(opts)
+ config["import"].set_args(opts)
# Special case: --copy flag suppresses import_move (which would
# otherwise take precedence).
if opts.copy:
- config['import']['move'] = False
+ config["import"]["move"] = False
if opts.library:
query = decargs(args)
@@ -1272,22 +1338,28 @@ def import_func(lib, opts, args):
paths_from_logfiles = list(_parse_logfiles(opts.from_logfiles or []))
if not paths and not paths_from_logfiles:
- raise ui.UserError('no path specified')
+ raise ui.UserError("no path specified")
# On Python 2, we used to get filenames as raw bytes, which is
# what we need. On Python 3, we need to undo the "helpful"
# conversion to Unicode strings to get the real bytestring
# filename.
- paths = [p.encode(util.arg_encoding(), 'surrogateescape')
- for p in paths]
- paths_from_logfiles = [p.encode(util.arg_encoding(), 'surrogateescape')
- for p in paths_from_logfiles]
+ paths = [
+ p.encode(util.arg_encoding(), "surrogateescape") for p in paths
+ ]
+ paths_from_logfiles = [
+ p.encode(util.arg_encoding(), "surrogateescape")
+ for p in paths_from_logfiles
+ ]
# Check the user-specified directories.
for path in paths:
if not os.path.exists(syspath(normpath(path))):
- raise ui.UserError('no such file or directory: {}'.format(
- displayable_path(path)))
+ raise ui.UserError(
+ "no such file or directory: {}".format(
+ displayable_path(path)
+ )
+ )
# Check the directories from the logfiles, but don't throw an error in
# case those paths don't exist. Maybe some of those paths have already
@@ -1295,8 +1367,11 @@ def import_func(lib, opts, args):
# suffice.
for path in paths_from_logfiles:
if not os.path.exists(syspath(normpath(path))):
- log.warning('No such file or directory: {}'.format(
- displayable_path(path)))
+ log.warning(
+ "No such file or directory: {}".format(
+ displayable_path(path)
+ )
+ )
continue
paths.append(path)
@@ -1304,109 +1379,171 @@ def import_func(lib, opts, args):
# If all paths were read from a logfile, and none of them exist, throw
# an error
if not paths:
- raise ui.UserError('none of the paths are importable')
+ raise ui.UserError("none of the paths are importable")
import_files(lib, paths, query)
import_cmd = ui.Subcommand(
- 'import', help='import new music', aliases=('imp', 'im')
+ "import", help="import new music", aliases=("imp", "im")
)
import_cmd.parser.add_option(
- '-c', '--copy', action='store_true', default=None,
- help="copy tracks into library directory (default)"
+ "-c",
+ "--copy",
+ action="store_true",
+ default=None,
+ help="copy tracks into library directory (default)",
)
import_cmd.parser.add_option(
- '-C', '--nocopy', action='store_false', dest='copy',
- help="don't copy tracks (opposite of -c)"
+ "-C",
+ "--nocopy",
+ action="store_false",
+ dest="copy",
+ help="don't copy tracks (opposite of -c)",
)
import_cmd.parser.add_option(
- '-m', '--move', action='store_true', dest='move',
- help="move tracks into the library (overrides -c)"
+ "-m",
+ "--move",
+ action="store_true",
+ dest="move",
+ help="move tracks into the library (overrides -c)",
)
import_cmd.parser.add_option(
- '-w', '--write', action='store_true', default=None,
- help="write new metadata to files' tags (default)"
+ "-w",
+ "--write",
+ action="store_true",
+ default=None,
+ help="write new metadata to files' tags (default)",
)
import_cmd.parser.add_option(
- '-W', '--nowrite', action='store_false', dest='write',
- help="don't write metadata (opposite of -w)"
+ "-W",
+ "--nowrite",
+ action="store_false",
+ dest="write",
+ help="don't write metadata (opposite of -w)",
)
import_cmd.parser.add_option(
- '-a', '--autotag', action='store_true', dest='autotag',
- help="infer tags for imported files (default)"
+ "-a",
+ "--autotag",
+ action="store_true",
+ dest="autotag",
+ help="infer tags for imported files (default)",
)
import_cmd.parser.add_option(
- '-A', '--noautotag', action='store_false', dest='autotag',
- help="don't infer tags for imported files (opposite of -a)"
+ "-A",
+ "--noautotag",
+ action="store_false",
+ dest="autotag",
+ help="don't infer tags for imported files (opposite of -a)",
)
import_cmd.parser.add_option(
- '-p', '--resume', action='store_true', default=None,
- help="resume importing if interrupted"
+ "-p",
+ "--resume",
+ action="store_true",
+ default=None,
+ help="resume importing if interrupted",
)
import_cmd.parser.add_option(
- '-P', '--noresume', action='store_false', dest='resume',
- help="do not try to resume importing"
+ "-P",
+ "--noresume",
+ action="store_false",
+ dest="resume",
+ help="do not try to resume importing",
)
import_cmd.parser.add_option(
- '-q', '--quiet', action='store_true', dest='quiet',
- help="never prompt for input: skip albums instead"
+ "-q",
+ "--quiet",
+ action="store_true",
+ dest="quiet",
+ help="never prompt for input: skip albums instead",
)
import_cmd.parser.add_option(
- '-l', '--log', dest='log',
- help='file to log untaggable albums for later review'
+ "-l",
+ "--log",
+ dest="log",
+ help="file to log untaggable albums for later review",
)
import_cmd.parser.add_option(
- '-s', '--singletons', action='store_true',
- help='import individual tracks instead of full albums'
+ "-s",
+ "--singletons",
+ action="store_true",
+ help="import individual tracks instead of full albums",
)
import_cmd.parser.add_option(
- '-t', '--timid', dest='timid', action='store_true',
- help='always confirm all actions'
+ "-t",
+ "--timid",
+ dest="timid",
+ action="store_true",
+ help="always confirm all actions",
)
import_cmd.parser.add_option(
- '-L', '--library', dest='library', action='store_true',
- help='retag items matching a query'
+ "-L",
+ "--library",
+ dest="library",
+ action="store_true",
+ help="retag items matching a query",
)
import_cmd.parser.add_option(
- '-i', '--incremental', dest='incremental', action='store_true',
- help='skip already-imported directories'
+ "-i",
+ "--incremental",
+ dest="incremental",
+ action="store_true",
+ help="skip already-imported directories",
)
import_cmd.parser.add_option(
- '-I', '--noincremental', dest='incremental', action='store_false',
- help='do not skip already-imported directories'
+ "-I",
+ "--noincremental",
+ dest="incremental",
+ action="store_false",
+ help="do not skip already-imported directories",
)
import_cmd.parser.add_option(
- '--from-scratch', dest='from_scratch', action='store_true',
- help='erase existing metadata before applying new metadata'
+ "--from-scratch",
+ dest="from_scratch",
+ action="store_true",
+ help="erase existing metadata before applying new metadata",
)
import_cmd.parser.add_option(
- '--flat', dest='flat', action='store_true',
- help='import an entire tree as a single album'
+ "--flat",
+ dest="flat",
+ action="store_true",
+ help="import an entire tree as a single album",
)
import_cmd.parser.add_option(
- '-g', '--group-albums', dest='group_albums', action='store_true',
- help='group tracks in a folder into separate albums'
+ "-g",
+ "--group-albums",
+ dest="group_albums",
+ action="store_true",
+ help="group tracks in a folder into separate albums",
)
import_cmd.parser.add_option(
- '--pretend', dest='pretend', action='store_true',
- help='just print the files to import'
+ "--pretend",
+ dest="pretend",
+ action="store_true",
+ help="just print the files to import",
)
import_cmd.parser.add_option(
- '-S', '--search-id', dest='search_ids', action='append',
- metavar='ID',
- help='restrict matching to a specific metadata backend ID'
+ "-S",
+ "--search-id",
+ dest="search_ids",
+ action="append",
+ metavar="ID",
+ help="restrict matching to a specific metadata backend ID",
)
import_cmd.parser.add_option(
- '--from-logfile', dest='from_logfiles', action='append',
- metavar='PATH',
- help='read skipped paths from an existing logfile'
+ "--from-logfile",
+ dest="from_logfiles",
+ action="append",
+ metavar="PATH",
+ help="read skipped paths from an existing logfile",
)
import_cmd.parser.add_option(
- '--set', dest='set_fields', action='callback',
+ "--set",
+ dest="set_fields",
+ action="callback",
callback=_store_dict,
- metavar='FIELD=VALUE',
- help='set the given fields to the supplied values'
+ metavar="FIELD=VALUE",
+ help="set the given fields to the supplied values",
)
import_cmd.func = import_func
default_commands.append(import_cmd)
@@ -1414,7 +1551,8 @@ def import_func(lib, opts, args):
# list: Query and show library contents.
-def list_items(lib, query, album, fmt=''):
+
+def list_items(lib, query, album, fmt=""):
"""Print out items in lib matching query. If album, then search for
albums instead of single items.
"""
@@ -1430,9 +1568,10 @@ def list_func(lib, opts, args):
list_items(lib, decargs(args), opts.album)
-list_cmd = ui.Subcommand('list', help='query the library', aliases=('ls',))
-list_cmd.parser.usage += "\n" \
- 'Example: %prog -f \'$album: $title\' artist:beatles'
+list_cmd = ui.Subcommand("list", help="query the library", aliases=("ls",))
+list_cmd.parser.usage += (
+ "\n" "Example: %prog -f '$album: $title' artist:beatles"
+)
list_cmd.parser.add_all_common_options()
list_cmd.func = list_func
default_commands.append(list_cmd)
@@ -1440,8 +1579,8 @@ def list_func(lib, opts, args):
# update: Update library contents according to on-disk tags.
-def update_items(lib, query, album, move, pretend, fields,
- exclude_fields=None):
+
+def update_items(lib, query, album, move, pretend, fields, exclude_fields=None):
"""For all the items matched by the query, update the library to
reflect the item's embedded tags.
:param fields: The fields to be stored. If not specified, all fields will
@@ -1451,17 +1590,17 @@ def update_items(lib, query, album, move, pretend, fields,
"""
with lib.transaction():
items, _ = _do_query(lib, query, album)
- if move and fields is not None and 'path' not in fields:
+ if move and fields is not None and "path" not in fields:
# Special case: if an item needs to be moved, the path field has to
# updated; otherwise the new path will not be reflected in the
# database.
- fields.append('path')
+ fields.append("path")
if fields is None:
# no fields were provided, update all media fields
item_fields = fields or library.Item._media_fields
- if move and 'path' not in item_fields:
+ if move and "path" not in item_fields:
# move is enabled, add 'path' to the list of fields to update
- item_fields.add('path')
+ item_fields.add("path")
else:
# fields was provided, just update those
item_fields = fields
@@ -1478,7 +1617,7 @@ def update_items(lib, query, album, move, pretend, fields,
# Item deleted?
if not item.path or not os.path.exists(syspath(item.path)):
ui.print_(format(item))
- ui.print_(ui.colorize('text_error', ' deleted'))
+ ui.print_(ui.colorize("text_error", " deleted"))
if not pretend:
item.remove(True)
affected_albums.add(item.album_id)
@@ -1486,16 +1625,20 @@ def update_items(lib, query, album, move, pretend, fields,
# Did the item change since last checked?
if item.current_mtime() <= item.mtime:
- log.debug('skipping {0} because mtime is up to date ({1})',
- displayable_path(item.path), item.mtime)
+ log.debug(
+ "skipping {0} because mtime is up to date ({1})",
+ displayable_path(item.path),
+ item.mtime,
+ )
continue
# Read new data.
try:
item.read()
except library.ReadError as exc:
- log.error('error reading {0}: {1}',
- displayable_path(item.path), exc)
+ log.error(
+ "error reading {0}: {1}", displayable_path(item.path), exc
+ )
continue
# Special-case album artist when it matches track artist. (Hacky
@@ -1505,12 +1648,10 @@ def update_items(lib, query, album, move, pretend, fields,
old_item = lib.get_item(item.id)
if old_item.albumartist == old_item.artist == item.artist:
item.albumartist = old_item.albumartist
- item._dirty.discard('albumartist')
+ item._dirty.discard("albumartist")
# Check for and display changes.
- changed = ui.show_model_changes(
- item,
- fields=item_fields)
+ changed = ui.show_model_changes(item, fields=item_fields)
# Save changes.
if not pretend:
@@ -1538,7 +1679,7 @@ def update_items(lib, query, album, move, pretend, fields,
continue
album = lib.get_album(album_id)
if not album: # Empty albums have already been removed.
- log.debug('emptied album {0}', album_id)
+ log.debug("emptied album {0}", album_id)
continue
first_item = album.items().get()
@@ -1549,7 +1690,7 @@ def update_items(lib, query, album, move, pretend, fields,
# Move album art (and any inconsistent items).
if move and lib.directory in ancestry(first_item.path):
- log.debug('moving album {0}', album_id)
+ log.debug("moving album {0}", album_id)
# Manually moving and storing the album.
items = list(album.items())
@@ -1567,35 +1708,62 @@ def update_func(lib, opts, args):
ui.print_(lib.directory)
if not ui.input_yn("Are you sure you want to continue (y/n)?", True):
return
- update_items(lib, decargs(args), opts.album, ui.should_move(opts.move),
- opts.pretend, opts.fields, opts.exclude_fields)
+ update_items(
+ lib,
+ decargs(args),
+ opts.album,
+ ui.should_move(opts.move),
+ opts.pretend,
+ opts.fields,
+ opts.exclude_fields,
+ )
update_cmd = ui.Subcommand(
- 'update', help='update the library', aliases=('upd', 'up',)
+ "update",
+ help="update the library",
+ aliases=(
+ "upd",
+ "up",
+ ),
)
update_cmd.parser.add_album_option()
update_cmd.parser.add_format_option()
update_cmd.parser.add_option(
- '-m', '--move', action='store_true', dest='move',
- help="move files in the library directory"
+ "-m",
+ "--move",
+ action="store_true",
+ dest="move",
+ help="move files in the library directory",
)
update_cmd.parser.add_option(
- '-M', '--nomove', action='store_false', dest='move',
- help="don't move files in library"
+ "-M",
+ "--nomove",
+ action="store_false",
+ dest="move",
+ help="don't move files in library",
)
update_cmd.parser.add_option(
- '-p', '--pretend', action='store_true',
- help="show all changes but do nothing"
+ "-p",
+ "--pretend",
+ action="store_true",
+ help="show all changes but do nothing",
)
update_cmd.parser.add_option(
- '-F', '--field', default=None, action='append', dest='fields',
- help='list of fields to update'
+ "-F",
+ "--field",
+ default=None,
+ action="append",
+ dest="fields",
+ help="list of fields to update",
)
update_cmd.parser.add_option(
- '-e', '--exclude-field', default=None, action='append',
- dest='exclude_fields',
- help='list of fields to exclude from updates'
+ "-e",
+ "--exclude-field",
+ default=None,
+ action="append",
+ dest="exclude_fields",
+ help="list of fields to exclude from updates",
)
update_cmd.func = update_func
default_commands.append(update_cmd)
@@ -1603,6 +1771,7 @@ def update_func(lib, opts, args):
# remove: Remove items from library, delete files.
+
def remove_items(lib, query, album, delete, force):
"""Remove items matching query from lib. If album, then match and
remove whole albums. If delete, also remove files from disk.
@@ -1614,21 +1783,23 @@ def remove_items(lib, query, album, delete, force):
# Confirm file removal if not forcing removal.
if not force:
# Prepare confirmation with user.
- album_str = " in {} album{}".format(
- len(albums), 's' if len(albums) > 1 else ''
- ) if album else ""
+ album_str = (
+ " in {} album{}".format(len(albums), "s" if len(albums) > 1 else "")
+ if album
+ else ""
+ )
if delete:
- fmt = '$path - $title'
- prompt = 'Really DELETE'
- prompt_all = 'Really DELETE {} file{}{}'.format(
- len(items), 's' if len(items) > 1 else '', album_str
+ fmt = "$path - $title"
+ prompt = "Really DELETE"
+ prompt_all = "Really DELETE {} file{}{}".format(
+ len(items), "s" if len(items) > 1 else "", album_str
)
else:
- fmt = ''
- prompt = 'Really remove from the library?'
- prompt_all = 'Really remove {} item{}{} from the library?'.format(
- len(items), 's' if len(items) > 1 else '', album_str
+ fmt = ""
+ prompt = "Really remove from the library?"
+ prompt_all = "Really remove {} item{}{} from the library?".format(
+ len(items), "s" if len(items) > 1 else "", album_str
)
# Helpers for printing affected items
@@ -1647,8 +1818,9 @@ def fmt_album(a):
fmt_obj(o)
# Confirm with user.
- objs = ui.input_select_objects(prompt, objs, fmt_obj,
- prompt_all=prompt_all)
+ objs = ui.input_select_objects(
+ prompt, objs, fmt_obj, prompt_all=prompt_all
+ )
if not objs:
return
@@ -1664,15 +1836,13 @@ def remove_func(lib, opts, args):
remove_cmd = ui.Subcommand(
- 'remove', help='remove matching items from the library', aliases=('rm',)
+ "remove", help="remove matching items from the library", aliases=("rm",)
)
remove_cmd.parser.add_option(
- "-d", "--delete", action="store_true",
- help="also remove files from disk"
+ "-d", "--delete", action="store_true", help="also remove files from disk"
)
remove_cmd.parser.add_option(
- "-f", "--force", action="store_true",
- help="do not ask when removing items"
+ "-f", "--force", action="store_true", help="do not ask when removing items"
)
remove_cmd.parser.add_album_option()
remove_cmd.func = remove_func
@@ -1681,6 +1851,7 @@ def remove_func(lib, opts, args):
# stats: Show library/query statistics.
+
def show_stats(lib, query, exact):
"""Shows some statistics about the matched items."""
items = lib.items(query)
@@ -1697,7 +1868,7 @@ def show_stats(lib, query, exact):
try:
total_size += os.path.getsize(syspath(item.path))
except OSError as exc:
- log.info('could not get size of {}: {}', item.path, exc)
+ log.info("could not get size of {}: {}", item.path, exc)
else:
total_size += int(item.length * item.bitrate / 8)
total_time += item.length
@@ -1707,24 +1878,26 @@ def show_stats(lib, query, exact):
if item.album_id:
albums.add(item.album_id)
- size_str = '' + ui.human_bytes(total_size)
+ size_str = "" + ui.human_bytes(total_size)
if exact:
- size_str += f' ({total_size} bytes)'
+ size_str += f" ({total_size} bytes)"
- print_("""Tracks: {}
+ print_(
+ """Tracks: {}
Total time: {}{}
{}: {}
Artists: {}
Albums: {}
Album artists: {}""".format(
- total_items,
- ui.human_seconds(total_time),
- f' ({total_time:.2f} seconds)' if exact else '',
- 'Total size' if exact else 'Approximate total size',
- size_str,
- len(artists),
- len(albums),
- len(album_artists)),
+ total_items,
+ ui.human_seconds(total_time),
+ f" ({total_time:.2f} seconds)" if exact else "",
+ "Total size" if exact else "Approximate total size",
+ size_str,
+ len(artists),
+ len(albums),
+ len(album_artists),
+ ),
)
@@ -1733,11 +1906,10 @@ def stats_func(lib, opts, args):
stats_cmd = ui.Subcommand(
- 'stats', help='show statistics about the library or a query'
+ "stats", help="show statistics about the library or a query"
)
stats_cmd.parser.add_option(
- '-e', '--exact', action='store_true',
- help='exact size and time'
+ "-e", "--exact", action="store_true", help="exact size and time"
)
stats_cmd.func = stats_func
default_commands.append(stats_cmd)
@@ -1745,26 +1917,26 @@ def stats_func(lib, opts, args):
# version: Show current beets version.
+
def show_version(lib, opts, args):
- print_('beets version %s' % beets.__version__)
- print_(f'Python version {python_version()}')
+ print_("beets version %s" % beets.__version__)
+ print_(f"Python version {python_version()}")
# Show plugins.
names = sorted(p.name for p in plugins.find_plugins())
if names:
- print_('plugins:', ', '.join(names))
+ print_("plugins:", ", ".join(names))
else:
- print_('no plugins loaded')
+ print_("no plugins loaded")
-version_cmd = ui.Subcommand(
- 'version', help='output version information'
-)
+version_cmd = ui.Subcommand("version", help="output version information")
version_cmd.func = show_version
default_commands.append(version_cmd)
# modify: Declaratively change metadata.
+
def modify_items(lib, mods, dels, query, write, move, album, confirm, inherit):
"""Modifies matching items according to user-specified assignments and
deletions.
@@ -1781,11 +1953,11 @@ def modify_items(lib, mods, dels, query, write, move, album, confirm, inherit):
# Apply changes *temporarily*, preview them, and collect modified
# objects.
- print_('Modifying {} {}s.'
- .format(len(objs), 'album' if album else 'item'))
+ print_("Modifying {} {}s.".format(len(objs), "album" if album else "item"))
changed = []
- templates = {key: functemplate.template(value)
- for key, value in mods.items()}
+ templates = {
+ key: functemplate.template(value) for key, value in mods.items()
+ }
for obj in objs:
obj_mods = {
key: model_cls._parse(key, obj.evaluate_template(templates[key]))
@@ -1796,23 +1968,24 @@ def modify_items(lib, mods, dels, query, write, move, album, confirm, inherit):
# Still something to do?
if not changed:
- print_('No changes to make.')
+ print_("No changes to make.")
return
# Confirm action.
if confirm:
if write and move:
- extra = ', move and write tags'
+ extra = ", move and write tags"
elif write:
- extra = ' and write tags'
+ extra = " and write tags"
elif move:
- extra = ' and move'
+ extra = " and move"
else:
- extra = ''
+ extra = ""
changed = ui.input_select_objects(
- 'Really modify%s' % extra, changed,
- lambda o: print_and_modify(o, mods, dels)
+ "Really modify%s" % extra,
+ changed,
+ lambda o: print_and_modify(o, mods, dels),
)
# Apply changes to database and files
@@ -1846,10 +2019,10 @@ def modify_parse_args(args):
dels = []
query = []
for arg in args:
- if arg.endswith('!') and '=' not in arg and ':' not in arg:
+ if arg.endswith("!") and "=" not in arg and ":" not in arg:
dels.append(arg[:-1]) # Strip trailing !.
- elif '=' in arg and ':' not in arg.split('=', 1)[0]:
- key, val = arg.split('=', 1)
+ elif "=" in arg and ":" not in arg.split("=", 1)[0]:
+ key, val = arg.split("=", 1)
mods[key] = val
else:
query.append(arg)
@@ -1859,40 +2032,63 @@ def modify_parse_args(args):
def modify_func(lib, opts, args):
query, mods, dels = modify_parse_args(decargs(args))
if not mods and not dels:
- raise ui.UserError('no modifications specified')
- modify_items(lib, mods, dels, query, ui.should_write(opts.write),
- ui.should_move(opts.move), opts.album, not opts.yes,
- opts.inherit)
+ raise ui.UserError("no modifications specified")
+ modify_items(
+ lib,
+ mods,
+ dels,
+ query,
+ ui.should_write(opts.write),
+ ui.should_move(opts.move),
+ opts.album,
+ not opts.yes,
+ opts.inherit,
+ )
modify_cmd = ui.Subcommand(
- 'modify', help='change metadata fields', aliases=('mod',)
+ "modify", help="change metadata fields", aliases=("mod",)
)
modify_cmd.parser.add_option(
- '-m', '--move', action='store_true', dest='move',
- help="move files in the library directory"
+ "-m",
+ "--move",
+ action="store_true",
+ dest="move",
+ help="move files in the library directory",
)
modify_cmd.parser.add_option(
- '-M', '--nomove', action='store_false', dest='move',
- help="don't move files in library"
+ "-M",
+ "--nomove",
+ action="store_false",
+ dest="move",
+ help="don't move files in library",
)
modify_cmd.parser.add_option(
- '-w', '--write', action='store_true', default=None,
- help="write new metadata to files' tags (default)"
+ "-w",
+ "--write",
+ action="store_true",
+ default=None,
+ help="write new metadata to files' tags (default)",
)
modify_cmd.parser.add_option(
- '-W', '--nowrite', action='store_false', dest='write',
- help="don't write metadata (opposite of -w)"
+ "-W",
+ "--nowrite",
+ action="store_false",
+ dest="write",
+ help="don't write metadata (opposite of -w)",
)
modify_cmd.parser.add_album_option()
-modify_cmd.parser.add_format_option(target='item')
+modify_cmd.parser.add_format_option(target="item")
modify_cmd.parser.add_option(
- '-y', '--yes', action='store_true',
- help='skip confirmation'
+ "-y", "--yes", action="store_true", help="skip confirmation"
)
modify_cmd.parser.add_option(
- '-I', '--noinherit', action='store_false', dest='inherit', default=True,
- help="when modifying albums, don't also change item data"
+ "-I",
+ "--noinherit",
+ action="store_false",
+ dest="inherit",
+ default=True,
+ help="when modifying albums, don't also change item data",
)
modify_cmd.func = modify_func
default_commands.append(modify_cmd)
@@ -1900,8 +2096,10 @@ def modify_func(lib, opts, args):
# move: Move/copy files to the library or a new base directory.
-def move_items(lib, dest, query, copy, album, pretend, confirm=False,
- export=False):
+
+def move_items(
+ lib, dest, query, copy, album, pretend, confirm=False, export=False
+):
"""Moves or copies items to a new base directory, given by dest. If
dest is None, then the library's base directory is used, making the
command "consolidate" files.
@@ -1920,40 +2118,56 @@ def isalbummoved(album):
objs = [o for o in objs if (isalbummoved if album else isitemmoved)(o)]
num_unmoved = num_objs - len(objs)
# Report unmoved files that match the query.
- unmoved_msg = ''
+ unmoved_msg = ""
if num_unmoved > 0:
- unmoved_msg = f' ({num_unmoved} already in place)'
+ unmoved_msg = f" ({num_unmoved} already in place)"
copy = copy or export # Exporting always copies.
- action = 'Copying' if copy else 'Moving'
- act = 'copy' if copy else 'move'
- entity = 'album' if album else 'item'
- log.info('{0} {1} {2}{3}{4}.', action, len(objs), entity,
- 's' if len(objs) != 1 else '', unmoved_msg)
+ action = "Copying" if copy else "Moving"
+ act = "copy" if copy else "move"
+ entity = "album" if album else "item"
+ log.info(
+ "{0} {1} {2}{3}{4}.",
+ action,
+ len(objs),
+ entity,
+ "s" if len(objs) != 1 else "",
+ unmoved_msg,
+ )
if not objs:
return
if pretend:
if album:
- show_path_changes([(item.path, item.destination(basedir=dest))
- for obj in objs for item in obj.items()])
+ show_path_changes(
+ [
+ (item.path, item.destination(basedir=dest))
+ for obj in objs
+ for item in obj.items()
+ ]
+ )
else:
- show_path_changes([(obj.path, obj.destination(basedir=dest))
- for obj in objs])
+ show_path_changes(
+ [(obj.path, obj.destination(basedir=dest)) for obj in objs]
+ )
else:
if confirm:
objs = ui.input_select_objects(
- 'Really %s' % act, objs,
+ "Really %s" % act,
+ objs,
lambda o: show_path_changes(
- [(o.path, o.destination(basedir=dest))]))
+ [(o.path, o.destination(basedir=dest))]
+ ),
+ )
for obj in objs:
- log.debug('moving: {0}', util.displayable_path(obj.path))
+ log.debug("moving: {0}", util.displayable_path(obj.path))
if export:
# Copy without affecting the database.
- obj.move(operation=MoveOperation.COPY, basedir=dest,
- store=False)
+ obj.move(
+ operation=MoveOperation.COPY, basedir=dest, store=False
+ )
else:
# Ordinary move/copy: store the new path.
if copy:
@@ -1967,36 +2181,53 @@ def move_func(lib, opts, args):
if dest is not None:
dest = normpath(dest)
if not os.path.isdir(syspath(dest)):
- raise ui.UserError('no such directory: {}'.format(
- displayable_path(dest)
- ))
+ raise ui.UserError(
+ "no such directory: {}".format(displayable_path(dest))
+ )
- move_items(lib, dest, decargs(args), opts.copy, opts.album, opts.pretend,
- opts.timid, opts.export)
+ move_items(
+ lib,
+ dest,
+ decargs(args),
+ opts.copy,
+ opts.album,
+ opts.pretend,
+ opts.timid,
+ opts.export,
+ )
-move_cmd = ui.Subcommand(
- 'move', help='move or copy items', aliases=('mv',)
-)
+move_cmd = ui.Subcommand("move", help="move or copy items", aliases=("mv",))
move_cmd.parser.add_option(
- '-d', '--dest', metavar='DIR', dest='dest',
- help='destination directory'
+ "-d", "--dest", metavar="DIR", dest="dest", help="destination directory"
)
move_cmd.parser.add_option(
- '-c', '--copy', default=False, action='store_true',
- help='copy instead of moving'
+ "-c",
+ "--copy",
+ default=False,
+ action="store_true",
+ help="copy instead of moving",
)
move_cmd.parser.add_option(
- '-p', '--pretend', default=False, action='store_true',
- help='show how files would be moved, but don\'t touch anything'
+ "-p",
+ "--pretend",
+ default=False,
+ action="store_true",
+ help="show how files would be moved, but don't touch anything",
)
move_cmd.parser.add_option(
- '-t', '--timid', dest='timid', action='store_true',
- help='always confirm all actions'
+ "-t",
+ "--timid",
+ dest="timid",
+ action="store_true",
+ help="always confirm all actions",
)
move_cmd.parser.add_option(
- '-e', '--export', default=False, action='store_true',
- help='copy without changing the database path'
+ "-e",
+ "--export",
+ default=False,
+ action="store_true",
+ help="copy without changing the database path",
)
move_cmd.parser.add_album_option()
move_cmd.func = move_func
@@ -2005,6 +2236,7 @@ def move_func(lib, opts, args):
# write: Write tags into files.
+
def write_items(lib, query, pretend, force):
"""Write tag information from the database to the respective files
in the filesystem.
@@ -2014,20 +2246,22 @@ def write_items(lib, query, pretend, force):
for item in items:
# Item deleted?
if not os.path.exists(syspath(item.path)):
- log.info('missing file: {0}', util.displayable_path(item.path))
+ log.info("missing file: {0}", util.displayable_path(item.path))
continue
# Get an Item object reflecting the "clean" (on-disk) state.
try:
clean_item = library.Item.from_path(item.path)
except library.ReadError as exc:
- log.error('error reading {0}: {1}',
- displayable_path(item.path), exc)
+ log.error(
+ "error reading {0}: {1}", displayable_path(item.path), exc
+ )
continue
# Check for and display changes.
- changed = ui.show_model_changes(item, clean_item,
- library.Item._media_tag_fields, force)
+ changed = ui.show_model_changes(
+ item, clean_item, library.Item._media_tag_fields, force
+ )
if (changed or force) and not pretend:
# We use `try_sync` here to keep the mtime up to date in the
# database.
@@ -2038,14 +2272,18 @@ def write_func(lib, opts, args):
write_items(lib, decargs(args), opts.pretend, opts.force)
-write_cmd = ui.Subcommand('write', help='write tag information to files')
+write_cmd = ui.Subcommand("write", help="write tag information to files")
write_cmd.parser.add_option(
- '-p', '--pretend', action='store_true',
- help="show all changes but do nothing"
+ "-p",
+ "--pretend",
+ action="store_true",
+ help="show all changes but do nothing",
)
write_cmd.parser.add_option(
- '-f', '--force', action='store_true',
- help="write tags even if the existing tags match the database"
+ "-f",
+ "--force",
+ action="store_true",
+ help="write tags even if the existing tags match the database",
)
write_cmd.func = write_func
default_commands.append(write_cmd)
@@ -2053,6 +2291,7 @@ def write_func(lib, opts, args):
# config: Show and edit user configuration.
+
def config_func(lib, opts, args):
# Make sure lazy configuration is loaded
config.resolve()
@@ -2082,7 +2321,7 @@ def config_func(lib, opts, args):
# Dump configuration.
else:
config_out = config.dump(full=opts.defaults, redact=opts.redact)
- if config_out.strip() != '{}':
+ if config_out.strip() != "{}":
print_(config_out)
else:
print("Empty configuration")
@@ -2096,7 +2335,7 @@ def config_edit():
editor = util.editor_command()
try:
if not os.path.isfile(path):
- open(path, 'w+').close()
+ open(path, "w+").close()
util.interactive_open([path], editor)
except OSError as exc:
message = f"Could not edit configuration: {exc}"
@@ -2105,24 +2344,32 @@ def config_edit():
raise ui.UserError(message)
-config_cmd = ui.Subcommand('config',
- help='show or edit the user configuration')
+config_cmd = ui.Subcommand("config", help="show or edit the user configuration")
config_cmd.parser.add_option(
- '-p', '--paths', action='store_true',
- help='show files that configuration was loaded from'
+ "-p",
+ "--paths",
+ action="store_true",
+ help="show files that configuration was loaded from",
)
config_cmd.parser.add_option(
- '-e', '--edit', action='store_true',
- help='edit user configuration with $EDITOR'
+ "-e",
+ "--edit",
+ action="store_true",
+ help="edit user configuration with $EDITOR",
)
config_cmd.parser.add_option(
- '-d', '--defaults', action='store_true',
- help='include the default configuration'
+ "-d",
+ "--defaults",
+ action="store_true",
+ help="include the default configuration",
)
config_cmd.parser.add_option(
- '-c', '--clear', action='store_false',
- dest='redact', default=True,
- help='do not redact sensitive fields'
+ "-c",
+ "--clear",
+ action="store_false",
+ dest="redact",
+ default=True,
+ help="do not redact sensitive fields",
)
config_cmd.func = config_func
default_commands.append(config_cmd)
@@ -2130,22 +2377,25 @@ def config_edit():
# completion: print completion script
+
def print_completion(*args):
for line in completion_script(default_commands + plugins.commands()):
- print_(line, end='')
+ print_(line, end="")
if not any(os.path.isfile(syspath(p)) for p in BASH_COMPLETION_PATHS):
- log.warning('Warning: Unable to find the bash-completion package. '
- 'Command line completion might not work.')
+ log.warning(
+ "Warning: Unable to find the bash-completion package. "
+ "Command line completion might not work."
+ )
BASH_COMPLETION_PATHS = [
- b'/etc/bash_completion',
- b'/usr/share/bash-completion/bash_completion',
- b'/usr/local/share/bash-completion/bash_completion',
+ b"/etc/bash_completion",
+ b"/usr/share/bash-completion/bash_completion",
+ b"/usr/local/share/bash-completion/bash_completion",
# SmartOS
- b'/opt/local/share/bash-completion/bash_completion',
+ b"/opt/local/share/bash-completion/bash_completion",
# Homebrew (before bash-completion2)
- b'/usr/local/etc/bash_completion',
+ b"/usr/local/etc/bash_completion",
]
@@ -2155,7 +2405,7 @@ def completion_script(commands):
``commands`` is alist of ``ui.Subcommand`` instances to generate
completion data for.
"""
- base_script = os.path.join(os.path.dirname(__file__), 'completion_base.sh')
+ base_script = os.path.join(os.path.dirname(__file__), "completion_base.sh")
with open(base_script) as base_script:
yield base_script.read()
@@ -2169,50 +2419,47 @@ def completion_script(commands):
command_names.append(name)
for alias in cmd.aliases:
- if re.match(r'^\w+$', alias):
+ if re.match(r"^\w+$", alias):
aliases[alias] = name
- options[name] = {'flags': [], 'opts': []}
+ options[name] = {"flags": [], "opts": []}
for opts in cmd.parser._get_all_options()[1:]:
- if opts.action in ('store_true', 'store_false'):
- option_type = 'flags'
+ if opts.action in ("store_true", "store_false"):
+ option_type = "flags"
else:
- option_type = 'opts'
+ option_type = "opts"
options[name][option_type].extend(
opts._short_opts + opts._long_opts
)
# Add global options
- options['_global'] = {
- 'flags': ['-v', '--verbose'],
- 'opts':
- '-l --library -c --config -d --directory -h --help'.split(' ')
+ options["_global"] = {
+ "flags": ["-v", "--verbose"],
+ "opts": "-l --library -c --config -d --directory -h --help".split(" "),
}
# Add flags common to all commands
- options['_common'] = {
- 'flags': ['-h', '--help']
- }
+ options["_common"] = {"flags": ["-h", "--help"]}
# Start generating the script
yield "_beet() {\n"
# Command names
- yield " local commands='%s'\n" % ' '.join(command_names)
+ yield " local commands='%s'\n" % " ".join(command_names)
yield "\n"
# Command aliases
- yield " local aliases='%s'\n" % ' '.join(aliases.keys())
+ yield " local aliases='%s'\n" % " ".join(aliases.keys())
for alias, cmd in aliases.items():
- yield " local alias__{}={}\n".format(alias.replace('-', '_'), cmd)
- yield '\n'
+ yield " local alias__{}={}\n".format(alias.replace("-", "_"), cmd)
+ yield "\n"
# Fields
- yield " fields='%s'\n" % ' '.join(
+ yield " fields='%s'\n" % " ".join(
set(
- list(library.Item._fields.keys()) +
- list(library.Album._fields.keys())
+ list(library.Item._fields.keys())
+ + list(library.Album._fields.keys())
)
)
@@ -2220,17 +2467,18 @@ def completion_script(commands):
for cmd, opts in options.items():
for option_type, option_list in opts.items():
if option_list:
- option_list = ' '.join(option_list)
+ option_list = " ".join(option_list)
yield " local {}__{}='{}'\n".format(
- option_type, cmd.replace('-', '_'), option_list)
+ option_type, cmd.replace("-", "_"), option_list
+ )
- yield ' _beet_dispatch\n'
- yield '}\n'
+ yield " _beet_dispatch\n"
+ yield "}\n"
completion_cmd = ui.Subcommand(
- 'completion',
- help='print shell script that provides command line completion'
+ "completion",
+ help="print shell script that provides command line completion",
)
completion_cmd.func = print_completion
completion_cmd.hide = True
diff --git a/beets/util/__init__.py b/beets/util/__init__.py
index 11c829ec3a..fb07d7abc6 100644
--- a/beets/util/__init__.py
+++ b/beets/util/__init__.py
@@ -14,34 +14,46 @@
"""Miscellaneous utility functions."""
-import os
-import sys
import errno
-import re
-import tempfile
-import shutil
import fnmatch
import functools
+import os
+import platform
+import re
+import shlex
+import shutil
+import subprocess
+import sys
+import tempfile
+import traceback
from collections import Counter, namedtuple
+from enum import Enum
from logging import Logger
from multiprocessing.pool import ThreadPool
-import traceback
-import subprocess
-import platform
-import shlex
-from typing import Callable, List, Optional, Sequence, Pattern, \
- Tuple, MutableSequence, AnyStr, TypeVar, Generator, Any, \
- Iterable, Union
-from typing_extensions import TypeAlias
+from typing import (
+ Any,
+ AnyStr,
+ Callable,
+ Generator,
+ Iterable,
+ List,
+ MutableSequence,
+ Optional,
+ Pattern,
+ Sequence,
+ Tuple,
+ TypeVar,
+ Union,
+)
-from beets.util import hidden
+from typing_extensions import TypeAlias
from unidecode import unidecode
-from enum import Enum
+from beets.util import hidden
MAX_FILENAME_LENGTH = 200
-WINDOWS_MAGIC_PREFIX = '\\\\?\\'
-T = TypeVar('T')
+WINDOWS_MAGIC_PREFIX = "\\\\?\\"
+T = TypeVar("T")
Bytes_or_String: TypeAlias = Union[str, bytes]
@@ -58,7 +70,8 @@ class HumanReadableException(Exception):
associated exception. (Note that this is not necessary in Python 3.x
and should be removed when we make the transition.)
"""
- error_kind = 'Error' # Human-readable description of error type.
+
+ error_kind = "Error" # Human-readable description of error type.
def __init__(self, reason, verb, tb=None):
self.reason = reason
@@ -67,12 +80,11 @@ def __init__(self, reason, verb, tb=None):
super().__init__(self.get_message())
def _gerund(self):
- """Generate a (likely) gerund form of the English verb.
- """
- if ' ' in self.verb:
+ """Generate a (likely) gerund form of the English verb."""
+ if " " in self.verb:
return self.verb
- gerund = self.verb[:-1] if self.verb.endswith('e') else self.verb
- gerund += 'ing'
+ gerund = self.verb[:-1] if self.verb.endswith("e") else self.verb
+ gerund += "ing"
return gerund
def _reasonstr(self):
@@ -80,8 +92,8 @@ def _reasonstr(self):
if isinstance(self.reason, str):
return self.reason
elif isinstance(self.reason, bytes):
- return self.reason.decode('utf-8', 'ignore')
- elif hasattr(self.reason, 'strerror'): # i.e., EnvironmentError
+ return self.reason.decode("utf-8", "ignore")
+ elif hasattr(self.reason, "strerror"): # i.e., EnvironmentError
return self.reason.strerror
else:
return '"{}"'.format(str(self.reason))
@@ -98,7 +110,7 @@ def log(self, logger):
"""
if self.tb:
logger.debug(self.tb)
- logger.error('{0}: {1}', self.error_kind, self.args[0])
+ logger.error("{0}: {1}", self.error_kind, self.args[0])
class FilesystemError(HumanReadableException):
@@ -113,28 +125,27 @@ def __init__(self, reason, verb, paths, tb=None):
def get_message(self):
# Use a nicer English phrasing for some specific verbs.
- if self.verb in ('move', 'copy', 'rename'):
- clause = 'while {} {} to {}'.format(
+ if self.verb in ("move", "copy", "rename"):
+ clause = "while {} {} to {}".format(
self._gerund(),
displayable_path(self.paths[0]),
- displayable_path(self.paths[1])
+ displayable_path(self.paths[1]),
)
- elif self.verb in ('delete', 'write', 'create', 'read'):
- clause = 'while {} {}'.format(
- self._gerund(),
- displayable_path(self.paths[0])
+ elif self.verb in ("delete", "write", "create", "read"):
+ clause = "while {} {}".format(
+ self._gerund(), displayable_path(self.paths[0])
)
else:
- clause = 'during {} of paths {}'.format(
- self.verb, ', '.join(displayable_path(p) for p in self.paths)
+ clause = "during {} of paths {}".format(
+ self.verb, ", ".join(displayable_path(p) for p in self.paths)
)
- return f'{self._reasonstr()} {clause}'
+ return f"{self._reasonstr()} {clause}"
class MoveOperation(Enum):
- """The file operations that e.g. various move functions can carry out.
- """
+ """The file operations that e.g. various move functions can carry out."""
+
MOVE = 0
COPY = 1
LINK = 2
@@ -177,10 +188,10 @@ def ancestry(path: bytes) -> List[str]:
def sorted_walk(
- path: AnyStr,
- ignore: Sequence = (),
- ignore_hidden: bool = False,
- logger: Optional[Logger] = None,
+ path: AnyStr,
+ ignore: Sequence = (),
+ ignore_hidden: bool = False,
+ logger: Optional[Logger] = None,
) -> Generator[Tuple, None, None]:
"""Like `os.walk`, but yields things in case-insensitive sorted,
breadth-first order. Directory and file names matching any glob
@@ -196,9 +207,11 @@ def sorted_walk(
contents = os.listdir(syspath(path))
except OSError as exc:
if logger:
- logger.warning('could not list directory {}: {}'.format(
- displayable_path(path), exc.strerror
- ))
+ logger.warning(
+ "could not list directory {}: {}".format(
+ displayable_path(path), exc.strerror
+ )
+ )
return
dirs = []
files = []
@@ -210,9 +223,9 @@ def sorted_walk(
for pat in ignore:
if fnmatch.fnmatch(base, pat):
if logger:
- logger.debug('ignoring {} due to ignore rule {}'.format(
- base, pat
- ))
+ logger.debug(
+ "ignoring {} due to ignore rule {}".format(base, pat)
+ )
skip = True
break
if skip:
@@ -242,7 +255,7 @@ def path_as_posix(path: bytes) -> bytes:
"""Return the string representation of the path with forward (/)
slashes.
"""
- return path.replace(b'\\', b'/')
+ return path.replace(b"\\", b"/")
def mkdirall(path: bytes):
@@ -254,8 +267,9 @@ def mkdirall(path: bytes):
try:
os.mkdir(syspath(ancestor))
except OSError as exc:
- raise FilesystemError(exc, 'create', (ancestor,),
- traceback.format_exc())
+ raise FilesystemError(
+ exc, "create", (ancestor,), traceback.format_exc()
+ )
def fnmatch_all(names: Sequence[bytes], patterns: Sequence[bytes]) -> bool:
@@ -274,9 +288,9 @@ def fnmatch_all(names: Sequence[bytes], patterns: Sequence[bytes]) -> bool:
def prune_dirs(
- path: str,
- root: Optional[Bytes_or_String] = None,
- clutter: Sequence[str] = ('.DS_Store', 'Thumbs.db'),
+ path: str,
+ root: Optional[Bytes_or_String] = None,
+ clutter: Sequence[str] = (".DS_Store", "Thumbs.db"),
):
"""If path is an empty directory, then remove it. Recursively remove
path's ancestry up to root (which is never removed) where there are
@@ -295,7 +309,7 @@ def prune_dirs(
ancestors = []
elif root in ancestors:
# Only remove directories below the root.
- ancestors = ancestors[ancestors.index(root) + 1:]
+ ancestors = ancestors[ancestors.index(root) + 1 :]
else:
# Remove nothing.
return
@@ -356,13 +370,13 @@ def _fsencoding() -> str:
UTF-8 (not MBCS).
"""
encoding = sys.getfilesystemencoding() or sys.getdefaultencoding()
- if encoding == 'mbcs':
+ if encoding == "mbcs":
# On Windows, a broken encoding known to Python as "MBCS" is
# used for the filesystem. However, we only use the Unicode API
# for Windows paths, so the encoding is actually immaterial so
# we can avoid dealing with this nastiness. We arbitrarily
# choose UTF-8.
- encoding = 'utf-8'
+ encoding = "utf-8"
return encoding
@@ -378,20 +392,20 @@ def bytestring_path(path: Bytes_or_String) -> bytes:
# On Windows, remove the magic prefix added by `syspath`. This makes
# ``bytestring_path(syspath(X)) == X``, i.e., we can safely
# round-trip through `syspath`.
- if os.path.__name__ == 'ntpath' and path.startswith(WINDOWS_MAGIC_PREFIX):
- path = path[len(WINDOWS_MAGIC_PREFIX):]
+ if os.path.__name__ == "ntpath" and path.startswith(WINDOWS_MAGIC_PREFIX):
+ path = path[len(WINDOWS_MAGIC_PREFIX) :]
# Try to encode with default encodings, but fall back to utf-8.
try:
return path.encode(_fsencoding())
except (UnicodeError, LookupError):
- return path.encode('utf-8')
+ return path.encode("utf-8")
PATH_SEP: bytes = bytestring_path(os.sep)
-def displayable_path(path: bytes, separator: str = '; ') -> str:
+def displayable_path(path: bytes, separator: str = "; ") -> str:
"""Attempts to decode a bytestring path to a unicode object for the
purpose of displaying it to the user. If the `path` argument is a
list or a tuple, the elements are joined with `separator`.
@@ -405,9 +419,9 @@ def displayable_path(path: bytes, separator: str = '; ') -> str:
return str(path)
try:
- return path.decode(_fsencoding(), 'ignore')
+ return path.decode(_fsencoding(), "ignore")
except (UnicodeError, LookupError):
- return path.decode('utf-8', 'ignore')
+ return path.decode("utf-8", "ignore")
def syspath(path: bytes, prefix: bool = True) -> Bytes_or_String:
@@ -418,7 +432,7 @@ def syspath(path: bytes, prefix: bool = True) -> Bytes_or_String:
*really* know what you're doing.
"""
# Don't do anything if we're not on windows
- if os.path.__name__ != 'ntpath':
+ if os.path.__name__ != "ntpath":
return path
if not isinstance(path, str):
@@ -426,20 +440,20 @@ def syspath(path: bytes, prefix: bool = True) -> Bytes_or_String:
# arbitrarily. But earlier versions used MBCS because it is
# reported as the FS encoding by Windows. Try both.
try:
- path = path.decode('utf-8')
+ path = path.decode("utf-8")
except UnicodeError:
# The encoding should always be MBCS, Windows' broken
# Unicode representation.
assert isinstance(path, bytes)
encoding = sys.getfilesystemencoding() or sys.getdefaultencoding()
- path = path.decode(encoding, 'replace')
+ path = path.decode(encoding, "replace")
# Add the magic prefix if it isn't already there.
# https://msdn.microsoft.com/en-us/library/windows/desktop/aa365247.aspx
if prefix and not path.startswith(WINDOWS_MAGIC_PREFIX):
- if path.startswith('\\\\'):
+ if path.startswith("\\\\"):
# UNC path. Final path should look like \\?\UNC\...
- path = 'UNC' + path[1:]
+ path = "UNC" + path[1:]
path = WINDOWS_MAGIC_PREFIX + path
return path
@@ -462,10 +476,10 @@ def remove(path: Optional[bytes], soft: bool = True):
try:
os.remove(path)
except OSError as exc:
- raise FilesystemError(exc, 'delete', (path,), traceback.format_exc())
+ raise FilesystemError(exc, "delete", (path,), traceback.format_exc())
-def copy(path: bytes, dest: bytes, replace: bool = False):
+def copy(path: bytes, dest: bytes, replace: bool = False):
"""Copy a plain file. Permissions are not copied. If `dest` already
exists, raises a FilesystemError unless `replace` is True. Has no
effect if `path` is the same as `dest`. Paths are translated to
@@ -476,12 +490,11 @@ def copy(path: bytes, dest: bytes, replace: bool = False):
path = syspath(path)
dest = syspath(dest)
if not replace and os.path.exists(dest):
- raise FilesystemError('file exists', 'copy', (path, dest))
+ raise FilesystemError("file exists", "copy", (path, dest))
try:
shutil.copyfile(path, dest)
except OSError as exc:
- raise FilesystemError(exc, 'copy', (path, dest),
- traceback.format_exc())
+ raise FilesystemError(exc, "copy", (path, dest), traceback.format_exc())
def move(path: bytes, dest: bytes, replace: bool = False):
@@ -493,14 +506,13 @@ def move(path: bytes, dest: bytes, replace: bool = False):
translated to system paths.
"""
if os.path.isdir(syspath(path)):
- raise FilesystemError(u'source is directory', 'move', (path, dest))
+ raise FilesystemError("source is directory", "move", (path, dest))
if os.path.isdir(syspath(dest)):
- raise FilesystemError(u'destination is directory', 'move',
- (path, dest))
+ raise FilesystemError("destination is directory", "move", (path, dest))
if samefile(path, dest):
return
if os.path.exists(syspath(dest)) and not replace:
- raise FilesystemError('file exists', 'rename', (path, dest))
+ raise FilesystemError("file exists", "rename", (path, dest))
# First, try renaming the file.
try:
@@ -510,13 +522,13 @@ def move(path: bytes, dest: bytes, replace: bool = False):
basename = os.path.basename(bytestring_path(dest))
dirname = os.path.dirname(bytestring_path(dest))
tmp = tempfile.NamedTemporaryFile(
- suffix=syspath(b'.beets', prefix=False),
- prefix=syspath(b'.' + basename, prefix=False),
+ suffix=syspath(b".beets", prefix=False),
+ prefix=syspath(b"." + basename, prefix=False),
dir=syspath(dirname),
delete=False,
)
try:
- with open(syspath(path), 'rb') as f:
+ with open(syspath(path), "rb") as f:
shutil.copyfileobj(f, tmp)
finally:
tmp.close()
@@ -527,8 +539,9 @@ def move(path: bytes, dest: bytes, replace: bool = False):
tmp = None
os.remove(syspath(path))
except OSError as exc:
- raise FilesystemError(exc, 'move', (path, dest),
- traceback.format_exc())
+ raise FilesystemError(
+ exc, "move", (path, dest), traceback.format_exc()
+ )
finally:
if tmp is not None:
os.remove(tmp)
@@ -543,16 +556,18 @@ def link(path: bytes, dest: bytes, replace: bool = False):
return
if os.path.exists(syspath(dest)) and not replace:
- raise FilesystemError('file exists', 'rename', (path, dest))
+ raise FilesystemError("file exists", "rename", (path, dest))
try:
os.symlink(syspath(path), syspath(dest))
except NotImplementedError:
# raised on python >= 3.2 and Windows versions before Vista
- raise FilesystemError('OS does not support symbolic links.'
- 'link', (path, dest), traceback.format_exc())
+ raise FilesystemError(
+ "OS does not support symbolic links." "link",
+ (path, dest),
+ traceback.format_exc(),
+ )
except OSError as exc:
- raise FilesystemError(exc, 'link', (path, dest),
- traceback.format_exc())
+ raise FilesystemError(exc, "link", (path, dest), traceback.format_exc())
def hardlink(path: bytes, dest: bytes, replace: bool = False):
@@ -564,26 +579,33 @@ def hardlink(path: bytes, dest: bytes, replace: bool = False):
return
if os.path.exists(syspath(dest)) and not replace:
- raise FilesystemError('file exists', 'rename', (path, dest))
+ raise FilesystemError("file exists", "rename", (path, dest))
try:
os.link(syspath(path), syspath(dest))
except NotImplementedError:
- raise FilesystemError('OS does not support hard links.'
- 'link', (path, dest), traceback.format_exc())
+ raise FilesystemError(
+ "OS does not support hard links." "link",
+ (path, dest),
+ traceback.format_exc(),
+ )
except OSError as exc:
if exc.errno == errno.EXDEV:
- raise FilesystemError('Cannot hard link across devices.'
- 'link', (path, dest), traceback.format_exc())
+ raise FilesystemError(
+ "Cannot hard link across devices." "link",
+ (path, dest),
+ traceback.format_exc(),
+ )
else:
- raise FilesystemError(exc, 'link', (path, dest),
- traceback.format_exc())
+ raise FilesystemError(
+ exc, "link", (path, dest), traceback.format_exc()
+ )
def reflink(
- path: bytes,
- dest: bytes,
- replace: bool = False,
- fallback: bool = False,
+ path: bytes,
+ dest: bytes,
+ replace: bool = False,
+ fallback: bool = False,
):
"""Create a reflink from `dest` to `path`.
@@ -601,7 +623,7 @@ def reflink(
return
if os.path.exists(syspath(dest)) and not replace:
- raise FilesystemError('file exists', 'rename', (path, dest))
+ raise FilesystemError("file exists", "rename", (path, dest))
try:
pyreflink.reflink(path, dest)
@@ -609,8 +631,12 @@ def reflink(
if fallback:
copy(path, dest, replace)
else:
- raise FilesystemError('OS/filesystem does not support reflinks.',
- 'link', (path, dest), traceback.format_exc())
+ raise FilesystemError(
+ "OS/filesystem does not support reflinks.",
+ "link",
+ (path, dest),
+ traceback.format_exc(),
+ )
def unique_path(path: bytes) -> bytes:
@@ -622,15 +648,15 @@ def unique_path(path: bytes) -> bytes:
return path
base, ext = os.path.splitext(path)
- match = re.search(br'\.(\d)+$', base)
+ match = re.search(rb"\.(\d)+$", base)
if match:
num = int(match.group(1))
- base = base[:match.start()]
+ base = base[: match.start()]
else:
num = 0
while True:
num += 1
- suffix = f'.{num}'.encode() + ext
+ suffix = f".{num}".encode() + ext
new_path = base + suffix
if not os.path.exists(new_path):
return new_path
@@ -641,18 +667,18 @@ def unique_path(path: bytes) -> bytes:
# shares, which are sufficiently common as to cause frequent problems.
# https://msdn.microsoft.com/en-us/library/windows/desktop/aa365247.aspx
CHAR_REPLACE: List[Tuple[Pattern, str]] = [
- (re.compile(r'[\\/]'), '_'), # / and \ -- forbidden everywhere.
- (re.compile(r'^\.'), '_'), # Leading dot (hidden files on Unix).
- (re.compile(r'[\x00-\x1f]'), ''), # Control characters.
- (re.compile(r'[<>:"\?\*\|]'), '_'), # Windows "reserved characters".
- (re.compile(r'\.$'), '_'), # Trailing dots.
- (re.compile(r'\s+$'), ''), # Trailing whitespace.
+ (re.compile(r"[\\/]"), "_"), # / and \ -- forbidden everywhere.
+ (re.compile(r"^\."), "_"), # Leading dot (hidden files on Unix).
+ (re.compile(r"[\x00-\x1f]"), ""), # Control characters.
+ (re.compile(r'[<>:"\?\*\|]'), "_"), # Windows "reserved characters".
+ (re.compile(r"\.$"), "_"), # Trailing dots.
+ (re.compile(r"\s+$"), ""), # Trailing whitespace.
]
def sanitize_path(
- path: str,
- replacements: Optional[Sequence[Sequence[Union[Pattern, str]]]] = None,
+ path: str,
+ replacements: Optional[Sequence[Sequence[Union[Pattern, str]]]] = None,
) -> str:
"""Takes a path (as a Unicode string) and makes sure that it is
legal. Returns a new path. Only works with fragments; won't work
@@ -666,7 +692,7 @@ def sanitize_path(
comps = components(path)
if not comps:
- return ''
+ return ""
for i, comp in enumerate(comps):
for regex, repl in replacements:
comp = regex.sub(repl, comp)
@@ -685,18 +711,18 @@ def truncate_path(path: AnyStr, length: int = MAX_FILENAME_LENGTH) -> AnyStr:
base, ext = os.path.splitext(comps[-1])
if ext:
# Last component has an extension.
- base = base[:length - len(ext)]
+ base = base[: length - len(ext)]
out[-1] = base + ext
return os.path.join(*out)
def _legalize_stage(
- path: str,
- replacements: Optional[Sequence[Sequence[Union[Pattern, str]]]],
- length: int,
- extension: str,
- fragment: bool,
+ path: str,
+ replacements: Optional[Sequence[Sequence[Union[Pattern, str]]]],
+ length: int,
+ extension: str,
+ fragment: bool,
) -> Tuple[Bytes_or_String, bool]:
"""Perform a single round of path legalization steps
(sanitation/replacement, encoding from Unicode to bytes,
@@ -722,11 +748,11 @@ def _legalize_stage(
def legalize_path(
- path: str,
- replacements: Optional[Sequence[Sequence[Union[Pattern, str]]]],
- length: int,
- extension: bytes,
- fragment: bool,
+ path: str,
+ replacements: Optional[Sequence[Sequence[Union[Pattern, str]]]],
+ length: int,
+ extension: bytes,
+ fragment: bool,
) -> Tuple[Union[Bytes_or_String, bool]]:
"""Given a path-like Unicode string, produce a legal path. Return
the path and a flag indicating whether some replacements had to be
@@ -751,7 +777,7 @@ def legalize_path(
if fragment:
# Outputting Unicode.
- extension = extension.decode('utf-8', 'ignore')
+ extension = extension.decode("utf-8", "ignore")
first_stage_path, _ = _legalize_stage(
path, replacements, length, extension, fragment
@@ -792,7 +818,7 @@ def py3_path(path: AnyStr) -> str:
def str2bool(value: str) -> bool:
"""Returns a boolean reflecting a human-entered string."""
- return value.lower() in ('yes', '1', 'true', 't', 'y')
+ return value.lower() in ("yes", "1", "true", "t", "y")
def as_string(value: Any) -> str:
@@ -800,11 +826,11 @@ def as_string(value: Any) -> str:
None becomes the empty string. Bytestrings are silently decoded.
"""
if value is None:
- return ''
+ return ""
elif isinstance(value, memoryview):
- return bytes(value).decode('utf-8', 'ignore')
+ return bytes(value).decode("utf-8", "ignore")
elif isinstance(value, bytes):
- return value.decode('utf-8', 'ignore')
+ return value.decode("utf-8", "ignore")
else:
return str(value)
@@ -816,7 +842,7 @@ def plurality(objs: Sequence[T]) -> T:
"""
c = Counter(objs)
if not c:
- raise ValueError('sequence must be non-empty')
+ raise ValueError("sequence must be non-empty")
return c.most_common(1)[0]
@@ -826,23 +852,27 @@ def cpu_count() -> int:
"""
# Adapted from the soundconverter project:
# https://github.com/kassoulet/soundconverter
- if sys.platform == 'win32':
+ if sys.platform == "win32":
try:
- num = int(os.environ['NUMBER_OF_PROCESSORS'])
+ num = int(os.environ["NUMBER_OF_PROCESSORS"])
except (ValueError, KeyError):
num = 0
- elif sys.platform == 'darwin':
+ elif sys.platform == "darwin":
try:
- num = int(command_output([
- '/usr/sbin/sysctl',
- '-n',
- 'hw.ncpu',
- ]).stdout)
+ num = int(
+ command_output(
+ [
+ "/usr/sbin/sysctl",
+ "-n",
+ "hw.ncpu",
+ ]
+ ).stdout
+ )
except (ValueError, OSError, subprocess.CalledProcessError):
num = 0
else:
try:
- num = os.sysconf('SC_NPROCESSORS_ONLN')
+ num = os.sysconf("SC_NPROCESSORS_ONLN")
except (ValueError, OSError, AttributeError):
num = 0
if num >= 1:
@@ -853,7 +883,7 @@ def cpu_count() -> int:
def convert_command_args(args: List[bytes]) -> List[str]:
"""Convert command arguments, which may either be `bytes` or `str`
- objects, to uniformly surrogate-escaped strings. """
+ objects, to uniformly surrogate-escaped strings."""
assert isinstance(args, list)
def convert(arg) -> str:
@@ -869,8 +899,8 @@ def convert(arg) -> str:
def command_output(
- cmd: List[Bytes_or_String],
- shell: bool = False,
+ cmd: List[Bytes_or_String],
+ shell: bool = False,
) -> CommandOutput:
"""Runs the command and returns its output after it has exited.
@@ -898,14 +928,14 @@ def command_output(
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
stdin=devnull,
- close_fds=platform.system() != 'Windows',
- shell=shell
+ close_fds=platform.system() != "Windows",
+ shell=shell,
)
stdout, stderr = proc.communicate()
if proc.returncode:
raise subprocess.CalledProcessError(
returncode=proc.returncode,
- cmd=' '.join(cmd),
+ cmd=" ".join(cmd),
output=stdout + stderr,
)
return CommandOutput(stdout, stderr)
@@ -918,7 +948,7 @@ def max_filename_length(path: AnyStr, limit=MAX_FILENAME_LENGTH) -> int:
misreports its capacity). If it cannot be determined (e.g., on
Windows), return `limit`.
"""
- if hasattr(os, 'statvfs'):
+ if hasattr(os, "statvfs"):
try:
res = os.statvfs(path)
except OSError:
@@ -933,12 +963,12 @@ def open_anything() -> str:
program.
"""
sys_name = platform.system()
- if sys_name == 'Darwin':
- base_cmd = 'open'
- elif sys_name == 'Windows':
- base_cmd = 'start'
+ if sys_name == "Darwin":
+ base_cmd = "open"
+ elif sys_name == "Windows":
+ base_cmd = "start"
else: # Assume Unix
- base_cmd = 'xdg-open'
+ base_cmd = "xdg-open"
return base_cmd
@@ -949,7 +979,7 @@ def editor_command() -> str:
present, fall back to `open_anything()`, the platform-specific tool
for opening files in general.
"""
- editor = os.environ.get('EDITOR')
+ editor = os.environ.get("EDITOR")
if editor:
return editor
return open_anything()
@@ -994,7 +1024,7 @@ def case_sensitive(path: bytes) -> bool:
if head == path:
# We have reached the root of the file system.
# By default, the case sensitivity depends on the platform.
- return platform.system() != 'Windows'
+ return platform.system() != "Windows"
# Trailing path separator, or path does not exist.
if not tail or not os.path.exists(path):
@@ -1031,9 +1061,9 @@ def raw_seconds_short(string: str) -> float:
Raises ValueError if the conversion cannot take place due to `string` not
being in the right format.
"""
- match = re.match(r'^(\d+):([0-5]\d)$', string)
+ match = re.match(r"^(\d+):([0-5]\d)$", string)
if not match:
- raise ValueError('String not in M:SS format')
+ raise ValueError("String not in M:SS format")
minutes, seconds = map(int, match.groups())
return float(minutes * 60 + seconds)
@@ -1056,8 +1086,7 @@ def asciify_path(path: str, sep_replace: str) -> str:
path_components[index] = unidecode(item).replace(os.sep, sep_replace)
if os.altsep:
path_components[index] = unidecode(item).replace(
- os.altsep,
- sep_replace
+ os.altsep, sep_replace
)
return os.sep.join(path_components)
@@ -1084,7 +1113,7 @@ def lazy_property(func: Callable) -> Callable:
This behaviour is useful when `func` is expensive to evaluate, and it is
not certain that the result will be needed.
"""
- field_name = '_' + func.__name__
+ field_name = "_" + func.__name__
@property
@functools.wraps(func)
diff --git a/beets/util/artresizer.py b/beets/util/artresizer.py
index 94f8bcb5db..16a66a74d9 100644
--- a/beets/util/artresizer.py
+++ b/beets/util/artresizer.py
@@ -16,21 +16,21 @@
public resizing proxy if neither is available.
"""
-from itertools import chain
-import subprocess
import os
import os.path
import platform
import re
+import subprocess
+from itertools import chain
from tempfile import NamedTemporaryFile
from urllib.parse import urlencode
-from beets import logging
-from beets import util
+
+from beets import logging, util
from beets.util import bytestring_path, displayable_path, py3_path, syspath
-PROXY_URL = 'https://images.weserv.nl/'
+PROXY_URL = "https://images.weserv.nl/"
-log = logging.getLogger('beets')
+log = logging.getLogger("beets")
def resize_url(url, maxwidth, quality=0):
@@ -38,14 +38,14 @@ def resize_url(url, maxwidth, quality=0):
maxwidth (preserving aspect ratio).
"""
params = {
- 'url': url.replace('http://', ''),
- 'w': maxwidth,
+ "url": url.replace("http://", ""),
+ "w": maxwidth,
}
if quality > 0:
- params['q'] = quality
+ params["q"] = quality
- return '{}?{}'.format(PROXY_URL, urlencode(params))
+ return "{}?{}".format(PROXY_URL, urlencode(params))
def temp_file_for(path):
@@ -90,20 +90,22 @@ def version(cls):
Raises `LocalBackendNotAvailableError` if not available.
"""
if cls._version is None:
- for cmd_name, legacy in (('magick', False), ('convert', True)):
+ for cmd_name, legacy in (("magick", False), ("convert", True)):
try:
out = util.command_output([cmd_name, "--version"]).stdout
except (subprocess.CalledProcessError, OSError) as exc:
- log.debug('ImageMagick version check failed: {}', exc)
+ log.debug("ImageMagick version check failed: {}", exc)
cls._version = _NOT_AVAILABLE
else:
- if b'imagemagick' in out.lower():
- pattern = br".+ (\d+)\.(\d+)\.(\d+).*"
+ if b"imagemagick" in out.lower():
+ pattern = rb".+ (\d+)\.(\d+)\.(\d+).*"
match = re.search(pattern, out)
if match:
- cls._version = (int(match.group(1)),
- int(match.group(2)),
- int(match.group(3)))
+ cls._version = (
+ int(match.group(1)),
+ int(match.group(2)),
+ int(match.group(3)),
+ )
cls._legacy = legacy
if cls._version is _NOT_AVAILABLE:
@@ -123,24 +125,28 @@ def __init__(self):
# If it's not, fall back to the older, separate convert
# and identify commands.
if self._legacy:
- self.convert_cmd = ['convert']
- self.identify_cmd = ['identify']
- self.compare_cmd = ['compare']
+ self.convert_cmd = ["convert"]
+ self.identify_cmd = ["identify"]
+ self.compare_cmd = ["compare"]
else:
- self.convert_cmd = ['magick']
- self.identify_cmd = ['magick', 'identify']
- self.compare_cmd = ['magick', 'compare']
+ self.convert_cmd = ["magick"]
+ self.identify_cmd = ["magick", "identify"]
+ self.compare_cmd = ["magick", "compare"]
- def resize(self, maxwidth, path_in, path_out=None, quality=0,
- max_filesize=0):
+ def resize(
+ self, maxwidth, path_in, path_out=None, quality=0, max_filesize=0
+ ):
"""Resize using ImageMagick.
Use the ``magick`` program or ``convert`` on older versions. Return
the output path of resized image.
"""
path_out = path_out or temp_file_for(path_in)
- log.debug('artresizer: ImageMagick resizing {0} to {1}',
- displayable_path(path_in), displayable_path(path_out))
+ log.debug(
+ "artresizer: ImageMagick resizing {0} to {1}",
+ displayable_path(path_in),
+ displayable_path(path_out),
+ )
# "-resize WIDTHx>" shrinks images with the width larger
# than the given width while maintaining the aspect ratio
@@ -149,48 +155,56 @@ def resize(self, maxwidth, path_in, path_out=None, quality=0,
# it here for the sake of explicitness.
cmd = self.convert_cmd + [
syspath(path_in, prefix=False),
- '-resize', f'{maxwidth}x>',
- '-interlace', 'none',
+ "-resize",
+ f"{maxwidth}x>",
+ "-interlace",
+ "none",
]
if quality > 0:
- cmd += ['-quality', f'{quality}']
+ cmd += ["-quality", f"{quality}"]
# "-define jpeg:extent=SIZEb" sets the target filesize for imagemagick
# to SIZE in bytes.
if max_filesize > 0:
- cmd += ['-define', f'jpeg:extent={max_filesize}b']
+ cmd += ["-define", f"jpeg:extent={max_filesize}b"]
cmd.append(syspath(path_out, prefix=False))
try:
util.command_output(cmd)
except subprocess.CalledProcessError:
- log.warning('artresizer: IM convert failed for {0}',
- displayable_path(path_in))
+ log.warning(
+ "artresizer: IM convert failed for {0}",
+ displayable_path(path_in),
+ )
return path_in
return path_out
def get_size(self, path_in):
cmd = self.identify_cmd + [
- '-format', '%w %h', syspath(path_in, prefix=False)
+ "-format",
+ "%w %h",
+ syspath(path_in, prefix=False),
]
try:
out = util.command_output(cmd).stdout
except subprocess.CalledProcessError as exc:
- log.warning('ImageMagick size query failed')
+ log.warning("ImageMagick size query failed")
log.debug(
- '`convert` exited with (status {}) when '
- 'getting size with command {}:\n{}',
- exc.returncode, cmd, exc.output.strip()
+ "`convert` exited with (status {}) when "
+ "getting size with command {}:\n{}",
+ exc.returncode,
+ cmd,
+ exc.output.strip(),
)
return None
try:
- return tuple(map(int, out.split(b' ')))
+ return tuple(map(int, out.split(b" ")))
except IndexError:
- log.warning('Could not understand IM output: {0!r}', out)
+ log.warning("Could not understand IM output: {0!r}", out)
return None
def deinterlace(self, path_in, path_out=None):
@@ -198,7 +212,8 @@ def deinterlace(self, path_in, path_out=None):
cmd = self.convert_cmd + [
syspath(path_in, prefix=False),
- '-interlace', 'none',
+ "-interlace",
+ "none",
syspath(path_out, prefix=False),
]
@@ -210,10 +225,7 @@ def deinterlace(self, path_in, path_out=None):
return path_in
def get_format(self, filepath):
- cmd = self.identify_cmd + [
- '-format', '%[magick]',
- syspath(filepath)
- ]
+ cmd = self.identify_cmd + ["-format", "%[magick]", syspath(filepath)]
try:
return util.command_output(cmd).stdout
@@ -230,9 +242,7 @@ def convert_format(self, source, target, deinterlaced):
try:
subprocess.check_call(
- cmd,
- stderr=subprocess.DEVNULL,
- stdout=subprocess.DEVNULL
+ cmd, stderr=subprocess.DEVNULL, stdout=subprocess.DEVNULL
)
return target
except subprocess.CalledProcessError:
@@ -252,15 +262,23 @@ def compare(self, im1, im2, compare_threshold):
# On Windows, ImageMagick doesn't support the magic \\?\ prefix
# on paths, so we pass `prefix=False` to `syspath`.
convert_cmd = self.convert_cmd + [
- syspath(im2, prefix=False), syspath(im1, prefix=False),
- '-colorspace', 'gray', 'MIFF:-'
+ syspath(im2, prefix=False),
+ syspath(im1, prefix=False),
+ "-colorspace",
+ "gray",
+ "MIFF:-",
]
compare_cmd = self.compare_cmd + [
- '-define', 'phash:colorspaces=sRGB,HCLp',
- '-metric', 'PHASH', '-', 'null:',
+ "-define",
+ "phash:colorspaces=sRGB,HCLp",
+ "-metric",
+ "PHASH",
+ "-",
+ "null:",
]
- log.debug('comparing images with pipeline {} | {}',
- convert_cmd, compare_cmd)
+ log.debug(
+ "comparing images with pipeline {} | {}", convert_cmd, compare_cmd
+ )
convert_proc = subprocess.Popen(
convert_cmd,
stdout=subprocess.PIPE,
@@ -283,7 +301,7 @@ def compare(self, im1, im2, compare_threshold):
convert_proc.wait()
if convert_proc.returncode:
log.debug(
- 'ImageMagick convert failed with status {}: {!r}',
+ "ImageMagick convert failed with status {}: {!r}",
convert_proc.returncode,
convert_stderr,
)
@@ -293,8 +311,11 @@ def compare(self, im1, im2, compare_threshold):
stdout, stderr = compare_proc.communicate()
if compare_proc.returncode:
if compare_proc.returncode != 1:
- log.debug('ImageMagick compare failed: {0}, {1}',
- displayable_path(im2), displayable_path(im1))
+ log.debug(
+ "ImageMagick compare failed: {0}, {1}",
+ displayable_path(im2),
+ displayable_path(im1),
+ )
return None
out_str = stderr
else:
@@ -303,10 +324,10 @@ def compare(self, im1, im2, compare_threshold):
try:
phash_diff = float(out_str)
except ValueError:
- log.debug('IM output is not a number: {0!r}', out_str)
+ log.debug("IM output is not a number: {0!r}", out_str)
return None
- log.debug('ImageMagick compare score: {0}', phash_diff)
+ log.debug("ImageMagick compare score: {0}", phash_diff)
return phash_diff <= compare_threshold
@property
@@ -314,9 +335,9 @@ def can_write_metadata(self):
return True
def write_metadata(self, file, metadata):
- assignments = list(chain.from_iterable(
- ('-set', k, v) for k, v in metadata.items()
- ))
+ assignments = list(
+ chain.from_iterable(("-set", k, v) for k, v in metadata.items())
+ )
command = self.convert_cmd + [file, *assignments, file]
util.command_output(command)
@@ -328,7 +349,7 @@ class PILBackend(LocalBackend):
@classmethod
def version(cls):
try:
- __import__('PIL', fromlist=['Image'])
+ __import__("PIL", fromlist=["Image"])
except ImportError:
raise LocalBackendNotAvailableError()
@@ -339,16 +360,20 @@ def __init__(self):
"""
self.version()
- def resize(self, maxwidth, path_in, path_out=None, quality=0,
- max_filesize=0):
+ def resize(
+ self, maxwidth, path_in, path_out=None, quality=0, max_filesize=0
+ ):
"""Resize using Python Imaging Library (PIL). Return the output path
of resized image.
"""
path_out = path_out or temp_file_for(path_in)
from PIL import Image
- log.debug('artresizer: PIL resizing {0} to {1}',
- displayable_path(path_in), displayable_path(path_out))
+ log.debug(
+ "artresizer: PIL resizing {0} to {1}",
+ displayable_path(path_in),
+ displayable_path(path_out),
+ )
try:
im = Image.open(syspath(path_in))
@@ -384,17 +409,24 @@ def resize(self, maxwidth, path_in, path_out=None, quality=0,
if lower_qual < 10:
lower_qual = 10
# Use optimize flag to improve filesize decrease
- im.save(py3_path(path_out), quality=lower_qual,
- optimize=True, progressive=False)
- log.warning("PIL Failed to resize file to below {0}B",
- max_filesize)
+ im.save(
+ py3_path(path_out),
+ quality=lower_qual,
+ optimize=True,
+ progressive=False,
+ )
+ log.warning(
+ "PIL Failed to resize file to below {0}B", max_filesize
+ )
return path_out
else:
return path_out
except OSError:
- log.error("PIL cannot create thumbnail for '{0}'",
- displayable_path(path_in))
+ log.error(
+ "PIL cannot create thumbnail for '{0}'",
+ displayable_path(path_in),
+ )
return path_in
def get_size(self, path_in):
@@ -404,8 +436,9 @@ def get_size(self, path_in):
im = Image.open(syspath(path_in))
return im.size
except OSError as exc:
- log.error("PIL could not read file {}: {}",
- displayable_path(path_in), exc)
+ log.error(
+ "PIL could not read file {}: {}", displayable_path(path_in), exc
+ )
return None
def deinterlace(self, path_in, path_out=None):
@@ -426,8 +459,12 @@ def get_format(self, filepath):
try:
with Image.open(syspath(filepath)) as im:
return im.format
- except (ValueError, TypeError, UnidentifiedImageError,
- FileNotFoundError):
+ except (
+ ValueError,
+ TypeError,
+ UnidentifiedImageError,
+ FileNotFoundError,
+ ):
log.exception("failed to detect image format for {}", filepath)
return None
@@ -438,8 +475,13 @@ def convert_format(self, source, target, deinterlaced):
with Image.open(syspath(source)) as im:
im.save(py3_path(target), progressive=not deinterlaced)
return target
- except (ValueError, TypeError, UnidentifiedImageError,
- FileNotFoundError, OSError):
+ except (
+ ValueError,
+ TypeError,
+ UnidentifiedImageError,
+ FileNotFoundError,
+ OSError,
+ ):
log.exception("failed to convert image {} -> {}", source, target)
return source
@@ -492,12 +534,10 @@ def shared(cls):
class ArtResizer(metaclass=Shareable):
- """A singleton class that performs image resizes.
- """
+ """A singleton class that performs image resizes."""
def __init__(self):
- """Create a resizer object with an inferred method.
- """
+ """Create a resizer object with an inferred method."""
# Check if a local backend is available, and store an instance of the
# backend class. Otherwise, fallback to the web proxy.
for backend_cls in BACKEND_CLASSES:
@@ -528,8 +568,11 @@ def resize(
"""
if self.local:
return self.local_method.resize(
- maxwidth, path_in, path_out,
- quality=quality, max_filesize=max_filesize
+ maxwidth,
+ path_in,
+ path_out,
+ quality=quality,
+ max_filesize=max_filesize,
)
else:
# Handled by `proxy_url` already.
@@ -600,11 +643,11 @@ def reformat(self, path_in, new_format, deinterlaced=True):
new_format = new_format.lower()
# A nonexhaustive map of image "types" to extensions overrides
new_format = {
- 'jpeg': 'jpg',
+ "jpeg": "jpg",
}.get(new_format, new_format)
fname, ext = os.path.splitext(path_in)
- path_new = fname + b'.' + new_format.encode('utf8')
+ path_new = fname + b"." + new_format.encode("utf8")
# allows the exception to propagate, while still making sure a changed
# file path was removed
diff --git a/beets/util/bluelet.py b/beets/util/bluelet.py
index a40f3b2f73..db34486b5c 100644
--- a/beets/util/bluelet.py
+++ b/beets/util/bluelet.py
@@ -6,23 +6,24 @@
Bluelet: easy concurrency without all the messy parallelism.
"""
-import socket
+import collections
+import errno
import select
+import socket
import sys
-import types
-import errno
-import traceback
import time
-import collections
-
+import traceback
+import types
# Basic events used for thread scheduling.
+
class Event:
"""Just a base class identifying Bluelet events. An event is an
object yielded from a Bluelet thread coroutine to suspend operation
and communicate with the scheduler.
"""
+
pass
@@ -31,6 +32,7 @@ class WaitableEvent(Event):
waited for using a select() call. That is, it's an event with an
associated file descriptor.
"""
+
def waitables(self):
"""Return "waitable" objects to pass to select(). Should return
three iterables for input readiness, output readiness, and
@@ -48,18 +50,21 @@ def fire(self):
class ValueEvent(Event):
"""An event that does nothing but return a fixed value."""
+
def __init__(self, value):
self.value = value
class ExceptionEvent(Event):
"""Raise an exception at the yield point. Used internally."""
+
def __init__(self, exc_info):
self.exc_info = exc_info
class SpawnEvent(Event):
"""Add a new coroutine thread to the scheduler."""
+
def __init__(self, coro):
self.spawned = coro
@@ -68,12 +73,14 @@ class JoinEvent(Event):
"""Suspend the thread until the specified child thread has
completed.
"""
+
def __init__(self, child):
self.child = child
class KillEvent(Event):
"""Unschedule a child thread."""
+
def __init__(self, child):
self.child = child
@@ -83,6 +90,7 @@ class DelegationEvent(Event):
once the child thread finished, return control to the parent
thread.
"""
+
def __init__(self, coro):
self.spawned = coro
@@ -91,13 +99,14 @@ class ReturnEvent(Event):
"""Return a value the current thread's delegator at the point of
delegation. Ends the current (delegate) thread.
"""
+
def __init__(self, value):
self.value = value
class SleepEvent(WaitableEvent):
- """Suspend the thread for a given duration.
- """
+ """Suspend the thread for a given duration."""
+
def __init__(self, duration):
self.wakeup_time = time.time() + duration
@@ -107,6 +116,7 @@ def time_left(self):
class ReadEvent(WaitableEvent):
"""Reads from a file-like object."""
+
def __init__(self, fd, bufsize):
self.fd = fd
self.bufsize = bufsize
@@ -120,6 +130,7 @@ def fire(self):
class WriteEvent(WaitableEvent):
"""Writes to a file-like object."""
+
def __init__(self, fd, data):
self.fd = fd
self.data = data
@@ -133,6 +144,7 @@ def fire(self):
# Core logic for executing and scheduling threads.
+
def _event_select(events):
"""Perform a select() over all the Events provided, returning the
ones ready to be fired. Only WaitableEvents (including SleepEvents)
@@ -154,11 +166,11 @@ def _event_select(events):
wlist += w
xlist += x
for waitable in r:
- waitable_to_event[('r', waitable)] = event
+ waitable_to_event[("r", waitable)] = event
for waitable in w:
- waitable_to_event[('w', waitable)] = event
+ waitable_to_event[("w", waitable)] = event
for waitable in x:
- waitable_to_event[('x', waitable)] = event
+ waitable_to_event[("x", waitable)] = event
# If we have a any sleeping threads, determine how long to sleep.
if earliest_wakeup:
@@ -177,11 +189,11 @@ def _event_select(events):
# Gather ready events corresponding to the ready waitables.
ready_events = set()
for ready in rready:
- ready_events.add(waitable_to_event[('r', ready)])
+ ready_events.add(waitable_to_event[("r", ready)])
for ready in wready:
- ready_events.add(waitable_to_event[('w', ready)])
+ ready_events.add(waitable_to_event[("w", ready)])
for ready in xready:
- ready_events.add(waitable_to_event[('x', ready)])
+ ready_events.add(waitable_to_event[("x", ready)])
# Gather any finished sleeps.
for event in events:
@@ -207,6 +219,7 @@ class Delegated(Event):
"""Placeholder indicating that a thread has delegated execution to a
different thread.
"""
+
def __init__(self, child):
self.child = child
@@ -277,8 +290,7 @@ def advance_thread(coro, value, is_exc=False):
threads[coro] = next_event
def kill_thread(coro):
- """Unschedule this thread and its (recursive) delegates.
- """
+ """Unschedule this thread and its (recursive) delegates."""
# Collect all coroutines in the delegation stack.
coros = [coro]
while isinstance(threads[coro], Delegated):
@@ -338,12 +350,16 @@ def kill_thread(coro):
try:
value = event.fire()
except OSError as exc:
- if isinstance(exc.args, tuple) and \
- exc.args[0] == errno.EPIPE:
+ if (
+ isinstance(exc.args, tuple)
+ and exc.args[0] == errno.EPIPE
+ ):
# Broken pipe. Remote host disconnected.
pass
- elif isinstance(exc.args, tuple) and \
- exc.args[0] == errno.ECONNRESET:
+ elif (
+ isinstance(exc.args, tuple)
+ and exc.args[0] == errno.ECONNRESET
+ ):
# Connection was reset by peer.
pass
else:
@@ -382,16 +398,16 @@ def kill_thread(coro):
# Sockets and their associated events.
+
class SocketClosedError(Exception):
pass
class Listener:
- """A socket wrapper object for listening sockets.
- """
+ """A socket wrapper object for listening sockets."""
+
def __init__(self, host, port):
- """Create a listening socket on the given hostname and port.
- """
+ """Create a listening socket on the given hostname and port."""
self._closed = False
self.host = host
self.port = port
@@ -410,19 +426,18 @@ def accept(self):
return AcceptEvent(self)
def close(self):
- """Immediately close the listening socket. (Not an event.)
- """
+ """Immediately close the listening socket. (Not an event.)"""
self._closed = True
self.sock.close()
class Connection:
- """A socket wrapper object for connected sockets.
- """
+ """A socket wrapper object for connected sockets."""
+
def __init__(self, sock, addr):
self.sock = sock
self.addr = addr
- self._buf = b''
+ self._buf = b""
self._closed = False
def close(self):
@@ -473,7 +488,7 @@ def readline(self, terminator=b"\n", bufsize=1024):
self._buf += data
else:
line = self._buf
- self._buf = b''
+ self._buf = b""
yield ReturnEvent(line)
break
@@ -482,6 +497,7 @@ class AcceptEvent(WaitableEvent):
"""An event for Listener objects (listening sockets) that suspends
execution until the socket gets a connection.
"""
+
def __init__(self, listener):
self.listener = listener
@@ -497,6 +513,7 @@ class ReceiveEvent(WaitableEvent):
"""An event for Connection objects (connected sockets) for
asynchronously reading data.
"""
+
def __init__(self, conn, bufsize):
self.conn = conn
self.bufsize = bufsize
@@ -512,6 +529,7 @@ class SendEvent(WaitableEvent):
"""An event for Connection objects (connected sockets) for
asynchronously writing data.
"""
+
def __init__(self, conn, data, sendall=False):
self.conn = conn
self.data = data
@@ -530,9 +548,9 @@ def fire(self):
# Public interface for threads; each returns an event object that
# can immediately be "yield"ed.
+
def null():
- """Event: yield to the scheduler without doing anything special.
- """
+ """Event: yield to the scheduler without doing anything special."""
return ValueEvent(None)
@@ -541,7 +559,7 @@ def spawn(coro):
and child coroutines run concurrently.
"""
if not isinstance(coro, types.GeneratorType):
- raise ValueError('%s is not a coroutine' % coro)
+ raise ValueError("%s is not a coroutine" % coro)
return SpawnEvent(coro)
@@ -551,7 +569,7 @@ def call(coro):
returns a value using end(), then this event returns that value.
"""
if not isinstance(coro, types.GeneratorType):
- raise ValueError('%s is not a coroutine' % coro)
+ raise ValueError("%s is not a coroutine" % coro)
return DelegationEvent(coro)
@@ -573,7 +591,8 @@ def reader():
if not data:
break
buf.append(data)
- yield ReturnEvent(''.join(buf))
+ yield ReturnEvent("".join(buf))
+
return DelegationEvent(reader())
else:
@@ -595,8 +614,7 @@ def connect(host, port):
def sleep(duration):
- """Event: suspend the thread for ``duration`` seconds.
- """
+ """Event: suspend the thread for ``duration`` seconds."""
return SleepEvent(duration)
@@ -608,19 +626,20 @@ def join(coro):
def kill(coro):
- """Halt the execution of a different `spawn`ed thread.
- """
+ """Halt the execution of a different `spawn`ed thread."""
return KillEvent(coro)
# Convenience function for running socket servers.
+
def server(host, port, func):
"""A coroutine that runs a network server. Host and port specify the
listening address. func should be a coroutine that takes a single
parameter, a Connection object. The coroutine is invoked for every
incoming connection on the listening socket.
"""
+
def handler(conn):
try:
yield func(conn)
diff --git a/beets/util/confit.py b/beets/util/confit.py
index 927a9f0873..db72d8b8c4 100644
--- a/beets/util/confit.py
+++ b/beets/util/confit.py
@@ -13,9 +13,10 @@
# included in all copies or substantial portions of the Software.
+import warnings
+
import confuse
-import warnings
warnings.warn(
"beets.util.confit is deprecated; use confuse instead",
# Show the location of the `import confit` statement as the warning's
@@ -26,7 +27,7 @@
# Import everything from the confuse module into this module.
for key, value in confuse.__dict__.items():
- if key not in ['__name__']:
+ if key not in ["__name__"]:
globals()[key] = value
diff --git a/beets/util/enumeration.py b/beets/util/enumeration.py
index e49f6fddb6..33a6be58fb 100644
--- a/beets/util/enumeration.py
+++ b/beets/util/enumeration.py
@@ -20,6 +20,7 @@ class OrderedEnum(Enum):
"""
An Enum subclass that allows comparison of members.
"""
+
def __ge__(self, other):
if self.__class__ is other.__class__:
return self.value >= other.value
diff --git a/beets/util/functemplate.py b/beets/util/functemplate.py
index 809207b9a7..e511c5d971 100644
--- a/beets/util/functemplate.py
+++ b/beets/util/functemplate.py
@@ -27,22 +27,22 @@
"""
-import re
import ast
import dis
-import types
-import sys
import functools
+import re
+import sys
+import types
-SYMBOL_DELIM = '$'
-FUNC_DELIM = '%'
-GROUP_OPEN = '{'
-GROUP_CLOSE = '}'
-ARG_SEP = ','
-ESCAPE_CHAR = '$'
+SYMBOL_DELIM = "$"
+FUNC_DELIM = "%"
+GROUP_OPEN = "{"
+GROUP_CLOSE = "}"
+ARG_SEP = ","
+ESCAPE_CHAR = "$"
-VARIABLE_PREFIX = '__var_'
-FUNCTION_PREFIX = '__func_'
+VARIABLE_PREFIX = "__var_"
+FUNCTION_PREFIX = "__func_"
class Environment:
@@ -57,6 +57,7 @@ def __init__(self, values, functions):
# Code generation helpers.
+
def ex_lvalue(name):
"""A variable load expression."""
return ast.Name(name, ast.Store())
@@ -99,19 +100,19 @@ def ex_call(func, args):
return ast.Call(func, args, [])
-def compile_func(arg_names, statements, name='_the_func', debug=False):
+def compile_func(arg_names, statements, name="_the_func", debug=False):
"""Compile a list of statements as the body of a function and return
the resulting Python function. If `debug`, then print out the
bytecode of the compiled function.
"""
args_fields = {
- 'args': [ast.arg(arg=n, annotation=None) for n in arg_names],
- 'kwonlyargs': [],
- 'kw_defaults': [],
- 'defaults': [ex_literal(None) for _ in arg_names],
+ "args": [ast.arg(arg=n, annotation=None) for n in arg_names],
+ "kwonlyargs": [],
+ "kw_defaults": [],
+ "defaults": [ex_literal(None) for _ in arg_names],
}
- if 'posonlyargs' in ast.arguments._fields: # Added in Python 3.8.
- args_fields['posonlyargs'] = []
+ if "posonlyargs" in ast.arguments._fields: # Added in Python 3.8.
+ args_fields["posonlyargs"] = []
args = ast.arguments(**args_fields)
func_def = ast.FunctionDef(
@@ -130,7 +131,7 @@ def compile_func(arg_names, statements, name='_the_func', debug=False):
ast.fix_missing_locations(mod)
- prog = compile(mod, '<generated>', 'exec')
+ prog = compile(mod, "<generated>", "exec")
# Debug: show bytecode.
if debug:
@@ -146,6 +147,7 @@ def compile_func(arg_names, statements, name='_the_func', debug=False):
# AST nodes for the template language.
+
class Symbol:
"""A variable-substitution symbol in a template."""
@@ -154,7 +156,7 @@ def __init__(self, ident, original):
self.original = original
def __repr__(self):
- return 'Symbol(%s)' % repr(self.ident)
+ return "Symbol(%s)" % repr(self.ident)
def evaluate(self, env):
"""Evaluate the symbol in the environment, returning a Unicode
@@ -183,8 +185,9 @@ def __init__(self, ident, args, original):
self.original = original
def __repr__(self):
- return 'Call({}, {}, {})'.format(repr(self.ident), repr(self.args),
- repr(self.original))
+ return "Call({}, {}, {})".format(
+ repr(self.ident), repr(self.args), repr(self.original)
+ )
def evaluate(self, env):
"""Evaluate the function call in the environment, returning a
@@ -197,7 +200,7 @@ def evaluate(self, env):
except Exception as exc:
# Function raised exception! Maybe inlining the name of
# the exception will help debug.
- return '<%s>' % str(exc)
+ return "<%s>" % str(exc)
return str(out)
else:
return self.original
@@ -215,21 +218,22 @@ def translate(self):
# Create a subexpression that joins the result components of
# the arguments.
- arg_exprs.append(ex_call(
- ast.Attribute(ex_literal(''), 'join', ast.Load()),
- [ex_call(
- 'map',
+ arg_exprs.append(
+ ex_call(
+ ast.Attribute(ex_literal(""), "join", ast.Load()),
[
- ex_rvalue(str.__name__),
- ast.List(subexprs, ast.Load()),
- ]
- )],
- ))
-
- subexpr_call = ex_call(
- FUNCTION_PREFIX + self.ident,
- arg_exprs
- )
+ ex_call(
+ "map",
+ [
+ ex_rvalue(str.__name__),
+ ast.List(subexprs, ast.Load()),
+ ],
+ )
+ ],
+ )
+ )
+
+ subexpr_call = ex_call(FUNCTION_PREFIX + self.ident, arg_exprs)
return [subexpr_call], varnames, funcnames
@@ -242,7 +246,7 @@ def __init__(self, parts):
self.parts = parts
def __repr__(self):
- return 'Expression(%s)' % (repr(self.parts))
+ return "Expression(%s)" % (repr(self.parts))
def evaluate(self, env):
"""Evaluate the entire expression in the environment, returning
@@ -254,7 +258,7 @@ def evaluate(self, env):
out.append(part)
else:
out.append(part.evaluate(env))
- return ''.join(map(str, out))
+ return "".join(map(str, out))
def translate(self):
"""Compile the expression to a list of Python AST expressions, a
@@ -276,6 +280,7 @@ def translate(self):
# Parser.
+
class ParseError(Exception):
pass
@@ -295,7 +300,7 @@ class Parser:
"""
def __init__(self, string, in_argument=False):
- """ Create a new parser.
+ """Create a new parser.
:param in_arguments: boolean that indicates the parser is to be
used for parsing function arguments, ie. considering commas
(`ARG_SEP`) a special character
@@ -306,10 +311,16 @@ def __init__(self, string, in_argument=False):
self.parts = []
# Common parsing resources.
- special_chars = (SYMBOL_DELIM, FUNC_DELIM, GROUP_OPEN, GROUP_CLOSE,
- ESCAPE_CHAR)
- special_char_re = re.compile(r'[%s]|\Z' %
- ''.join(re.escape(c) for c in special_chars))
+ special_chars = (
+ SYMBOL_DELIM,
+ FUNC_DELIM,
+ GROUP_OPEN,
+ GROUP_CLOSE,
+ ESCAPE_CHAR,
+ )
+ special_char_re = re.compile(
+ r"[%s]|\Z" % "".join(re.escape(c) for c in special_chars)
+ )
escapable_chars = (SYMBOL_DELIM, FUNC_DELIM, GROUP_CLOSE, ARG_SEP)
terminator_chars = (GROUP_CLOSE,)
@@ -326,9 +337,10 @@ def parse_expression(self):
if self.in_argument:
extra_special_chars = (ARG_SEP,)
special_char_re = re.compile(
- r'[%s]|\Z' % ''.join(
- re.escape(c) for c in
- self.special_chars + extra_special_chars
+ r"[%s]|\Z"
+ % "".join(
+ re.escape(c)
+ for c in self.special_chars + extra_special_chars
)
)
@@ -341,10 +353,10 @@ def parse_expression(self):
# A non-special character. Skip to the next special
# character, treating the interstice as literal text.
next_pos = (
- special_char_re.search(
- self.string[self.pos:]).start() + self.pos
+ special_char_re.search(self.string[self.pos :]).start()
+ + self.pos
)
- text_parts.append(self.string[self.pos:next_pos])
+ text_parts.append(self.string[self.pos : next_pos])
self.pos = next_pos
continue
@@ -358,8 +370,9 @@ def parse_expression(self):
break
next_char = self.string[self.pos + 1]
- if char == ESCAPE_CHAR and next_char in (self.escapable_chars +
- extra_special_chars):
+ if char == ESCAPE_CHAR and next_char in (
+ self.escapable_chars + extra_special_chars
+ ):
# An escaped special character ($$, $}, etc.). Note that
# ${ is not an escape sequence: this is ambiguous with
# the start of a symbol and it's not necessary (just
@@ -370,7 +383,7 @@ def parse_expression(self):
# Shift all characters collected so far into a single string.
if text_parts:
- self.parts.append(''.join(text_parts))
+ self.parts.append("".join(text_parts))
text_parts = []
if char == SYMBOL_DELIM:
@@ -392,7 +405,7 @@ def parse_expression(self):
# If any parsed characters remain, shift them into a string.
if text_parts:
- self.parts.append(''.join(text_parts))
+ self.parts.append("".join(text_parts))
def parse_symbol(self):
"""Parse a variable reference (like ``$foo`` or ``${foo}``)
@@ -419,21 +432,23 @@ def parse_symbol(self):
closer = self.string.find(GROUP_CLOSE, self.pos)
if closer == -1 or closer == self.pos:
# No closing brace found or identifier is empty.
- self.parts.append(self.string[start_pos:self.pos])
+ self.parts.append(self.string[start_pos : self.pos])
else:
# Closer found.
- ident = self.string[self.pos:closer]
+ ident = self.string[self.pos : closer]
self.pos = closer + 1
- self.parts.append(Symbol(ident,
- self.string[start_pos:self.pos]))
+ self.parts.append(
+ Symbol(ident, self.string[start_pos : self.pos])
+ )
else:
# A bare-word symbol.
ident = self._parse_ident()
if ident:
# Found a real symbol.
- self.parts.append(Symbol(ident,
- self.string[start_pos:self.pos]))
+ self.parts.append(
+ Symbol(ident, self.string[start_pos : self.pos])
+ )
else:
# A standalone $.
self.parts.append(SYMBOL_DELIM)
@@ -457,25 +472,24 @@ def parse_call(self):
if self.pos >= len(self.string):
# Identifier terminates string.
- self.parts.append(self.string[start_pos:self.pos])
+ self.parts.append(self.string[start_pos : self.pos])
return
if self.string[self.pos] != GROUP_OPEN:
# Argument list not opened.
- self.parts.append(self.string[start_pos:self.pos])
+ self.parts.append(self.string[start_pos : self.pos])
return
# Skip past opening brace and try to parse an argument list.
self.pos += 1
args = self.parse_argument_list()
- if self.pos >= len(self.string) or \
- self.string[self.pos] != GROUP_CLOSE:
+ if self.pos >= len(self.string) or self.string[self.pos] != GROUP_CLOSE:
# Arguments unclosed.
- self.parts.append(self.string[start_pos:self.pos])
+ self.parts.append(self.string[start_pos : self.pos])
return
self.pos += 1 # Move past closing brace.
- self.parts.append(Call(ident, args, self.string[start_pos:self.pos]))
+ self.parts.append(Call(ident, args, self.string[start_pos : self.pos]))
def parse_argument_list(self):
"""Parse a list of arguments starting at ``pos``, returning a
@@ -487,15 +501,17 @@ def parse_argument_list(self):
expressions = []
while self.pos < len(self.string):
- subparser = Parser(self.string[self.pos:], in_argument=True)
+ subparser = Parser(self.string[self.pos :], in_argument=True)
subparser.parse_expression()
# Extract and advance past the parsed expression.
expressions.append(Expression(subparser.parts))
self.pos += subparser.pos
- if self.pos >= len(self.string) or \
- self.string[self.pos] == GROUP_CLOSE:
+ if (
+ self.pos >= len(self.string)
+ or self.string[self.pos] == GROUP_CLOSE
+ ):
# Argument list terminated by EOF or closing brace.
break
@@ -510,8 +526,8 @@ def _parse_ident(self):
"""Parse an identifier and return it (possibly an empty string).
Updates ``pos``.
"""
- remainder = self.string[self.pos:]
- ident = re.match(r'\w*', remainder).group(0)
+ remainder = self.string[self.pos :]
+ ident = re.match(r"\w*", remainder).group(0)
self.pos += len(ident)
return ident
@@ -524,7 +540,7 @@ def _parse(template):
parser.parse_expression()
parts = parser.parts
- remainder = parser.string[parser.pos:]
+ remainder = parser.string[parser.pos :]
if remainder:
parts.append(remainder)
return Expression(parts)
@@ -537,8 +553,7 @@ def template(fmt):
# External interface.
class Template:
- """A string template, including text, Symbols, and Calls.
- """
+ """A string template, including text, Symbols, and Calls."""
def __init__(self, template):
self.expr = _parse(template)
@@ -557,8 +572,7 @@ def interpret(self, values={}, functions={}):
return self.expr.evaluate(Environment(values, functions))
def substitute(self, values={}, functions={}):
- """Evaluate the template given the values and functions.
- """
+ """Evaluate the template given the values and functions."""
try:
res = self.compiled(values, functions)
except Exception: # Handle any exceptions thrown by compiled version.
@@ -588,24 +602,29 @@ def wrapper_func(values={}, functions={}):
for funcname in funcnames:
args[FUNCTION_PREFIX + funcname] = functions[funcname]
parts = func(**args)
- return ''.join(parts)
+ return "".join(parts)
return wrapper_func
# Performance tests.
-if __name__ == '__main__':
+if __name__ == "__main__":
import timeit
- _tmpl = Template('foo $bar %baz{foozle $bar barzle} $bar')
- _vars = {'bar': 'qux'}
- _funcs = {'baz': str.upper}
- interp_time = timeit.timeit('_tmpl.interpret(_vars, _funcs)',
- 'from __main__ import _tmpl, _vars, _funcs',
- number=10000)
+
+ _tmpl = Template("foo $bar %baz{foozle $bar barzle} $bar")
+ _vars = {"bar": "qux"}
+ _funcs = {"baz": str.upper}
+ interp_time = timeit.timeit(
+ "_tmpl.interpret(_vars, _funcs)",
+ "from __main__ import _tmpl, _vars, _funcs",
+ number=10000,
+ )
print(interp_time)
- comp_time = timeit.timeit('_tmpl.substitute(_vars, _funcs)',
- 'from __main__ import _tmpl, _vars, _funcs',
- number=10000)
+ comp_time = timeit.timeit(
+ "_tmpl.substitute(_vars, _funcs)",
+ "from __main__ import _tmpl, _vars, _funcs",
+ number=10000,
+ )
print(comp_time)
- print('Speedup:', interp_time / comp_time)
+ print("Speedup:", interp_time / comp_time)
diff --git a/beets/util/hidden.py b/beets/util/hidden.py
index c461d671f9..e84b10e1d6 100644
--- a/beets/util/hidden.py
+++ b/beets/util/hidden.py
@@ -14,10 +14,11 @@
"""Simple library to work out if a file is hidden on different platforms."""
+import ctypes
import os
import stat
-import ctypes
import sys
+
import beets.util
@@ -28,7 +29,7 @@ def _is_hidden_osx(path):
"""
file_stat = os.lstat(beets.util.syspath(path))
- if hasattr(file_stat, 'st_flags') and hasattr(stat, 'UF_HIDDEN'):
+ if hasattr(file_stat, "st_flags") and hasattr(stat, "UF_HIDDEN"):
return bool(file_stat.st_flags & stat.UF_HIDDEN)
else:
return False
@@ -55,7 +56,7 @@ def _is_hidden_dot(path):
Files starting with a dot are seen as "hidden" files on Unix-based OSes.
"""
- return os.path.basename(path).startswith(b'.')
+ return os.path.basename(path).startswith(b".")
def is_hidden(path):
@@ -74,11 +75,12 @@ def is_hidden(path):
work out if a file is hidden.
"""
# Run platform specific functions depending on the platform
- if sys.platform == 'darwin':
+ if sys.platform == "darwin":
return _is_hidden_osx(path) or _is_hidden_dot(path)
- elif sys.platform == 'win32':
+ elif sys.platform == "win32":
return _is_hidden_win(path)
else:
return _is_hidden_dot(path)
-__all__ = ['is_hidden']
+
+__all__ = ["is_hidden"]
diff --git a/beets/util/id_extractors.py b/beets/util/id_extractors.py
index 93fc2056c8..04e9e94a75 100644
--- a/beets/util/id_extractors.py
+++ b/beets/util/id_extractors.py
@@ -19,18 +19,18 @@
# Spotify IDs consist of 22 alphanumeric characters
# (zero-left-padded base62 representation of randomly generated UUID4)
spotify_id_regex = {
- 'pattern': r'(^|open\.spotify\.com/{}/)([0-9A-Za-z]{{22}})',
- 'match_group': 2,
+ "pattern": r"(^|open\.spotify\.com/{}/)([0-9A-Za-z]{{22}})",
+ "match_group": 2,
}
deezer_id_regex = {
- 'pattern': r'(^|deezer\.com/)([a-z]*/)?({}/)?(\d+)',
- 'match_group': 4,
+ "pattern": r"(^|deezer\.com/)([a-z]*/)?({}/)?(\d+)",
+ "match_group": 4,
}
beatport_id_regex = {
- 'pattern': r'(^|beatport\.com/release/.+/)(\d+)$',
- 'match_group': 2,
+ "pattern": r"(^|beatport\.com/release/.+/)(\d+)$",
+ "match_group": 2,
}
# A note on Bandcamp: There is no such thing as a Bandcamp album or artist ID,
@@ -54,12 +54,12 @@ def extract_discogs_id_regex(album_id):
# Regex has been tested here https://regex101.com/r/TOu7kw/1
for pattern in [
- r'^\[?r?(?P<id>\d+)\]?$',
- r'discogs\.com/release/(?P<id>\d+)-?',
- r'discogs\.com/[^/]+/release/(?P<id>\d+)',
+ r"^\[?r?(?P<id>\d+)\]?$",
+ r"discogs\.com/release/(?P<id>\d+)-?",
+ r"discogs\.com/[^/]+/release/(?P<id>\d+)",
]:
match = re.search(pattern, album_id)
if match:
- return int(match.group('id'))
+ return int(match.group("id"))
return None
diff --git a/beets/util/m3u.py b/beets/util/m3u.py
index 9c961a2918..b6e355e06b 100644
--- a/beets/util/m3u.py
+++ b/beets/util/m3u.py
@@ -16,16 +16,18 @@
import traceback
-from beets.util import syspath, normpath, mkdirall, FilesystemError
+from beets.util import FilesystemError, mkdirall, normpath, syspath
class EmptyPlaylistError(Exception):
"""Raised when a playlist file without media files is saved or loaded."""
+
pass
-class M3UFile():
+class M3UFile:
"""Reads and writes m3u or m3u8 playlist files."""
+
def __init__(self, path):
"""``path`` is the absolute path to the playlist file.
@@ -45,8 +47,9 @@ def load(self):
with open(syspath(pl_normpath), "rb") as pl_file:
raw_contents = pl_file.readlines()
except OSError as exc:
- raise FilesystemError(exc, 'read', (pl_normpath, ),
- traceback.format_exc())
+ raise FilesystemError(
+ exc, "read", (pl_normpath,), traceback.format_exc()
+ )
self.extm3u = True if raw_contents[0].rstrip() == b"#EXTM3U" else False
for line in raw_contents[1:]:
@@ -86,8 +89,9 @@ def write(self):
try:
with open(syspath(pl_normpath), "wb") as pl_file:
for line in contents:
- pl_file.write(line + b'\n')
- pl_file.write(b'\n') # Final linefeed to prevent noeol file.
+ pl_file.write(line + b"\n")
+ pl_file.write(b"\n") # Final linefeed to prevent noeol file.
except OSError as exc:
- raise FilesystemError(exc, 'create', (pl_normpath, ),
- traceback.format_exc())
+ raise FilesystemError(
+ exc, "create", (pl_normpath,), traceback.format_exc()
+ )
diff --git a/beets/util/pipeline.py b/beets/util/pipeline.py
index f4cc238193..c4933ff007 100644
--- a/beets/util/pipeline.py
+++ b/beets/util/pipeline.py
@@ -33,11 +33,11 @@
import queue
-from threading import Thread, Lock
import sys
+from threading import Lock, Thread
-BUBBLE = '__PIPELINE_BUBBLE__'
-POISON = '__PIPELINE_POISON__'
+BUBBLE = "__PIPELINE_BUBBLE__"
+POISON = "__PIPELINE_POISON__"
DEFAULT_QUEUE_SIZE = 16
@@ -48,6 +48,7 @@ def _invalidate_queue(q, val=None, sync=True):
which defaults to None. `sync` controls whether a lock is
required (because it's not reentrant!).
"""
+
def _qsize(len=len):
return 1
@@ -168,6 +169,7 @@ def coro(*args):
while True:
task = yield task
task = func(*(args + (task,)))
+
return coro
@@ -191,6 +193,7 @@ def coro(*args):
while True:
task = yield task
func(*(args + (task,)))
+
return coro
@@ -218,20 +221,18 @@ def __init__(self, all_threads):
self.exc_info = None
def abort(self):
- """Shut down the thread at the next chance possible.
- """
+ """Shut down the thread at the next chance possible."""
with self.abort_lock:
self.abort_flag = True
# Ensure that we are not blocking on a queue read or write.
- if hasattr(self, 'in_queue'):
+ if hasattr(self, "in_queue"):
_invalidate_queue(self.in_queue, POISON)
- if hasattr(self, 'out_queue'):
+ if hasattr(self, "out_queue"):
_invalidate_queue(self.out_queue, POISON)
def abort_all(self, exc_info):
- """Abort all other threads in the system for an exception.
- """
+ """Abort all other threads in the system for an exception."""
self.exc_info = exc_info
for thread in self.all_threads:
thread.abort()
@@ -373,7 +374,7 @@ def __init__(self, stages):
be at least two stages.
"""
if len(stages) < 2:
- raise ValueError('pipeline must have at least two stages')
+ raise ValueError("pipeline must have at least two stages")
self.stages = []
for stage in stages:
if isinstance(stage, (list, tuple)):
@@ -405,15 +406,15 @@ def run_parallel(self, queue_size=DEFAULT_QUEUE_SIZE):
# Middle stages.
for i in range(1, queue_count):
for coro in self.stages[i]:
- threads.append(MiddlePipelineThread(
- coro, queues[i - 1], queues[i], threads
- ))
+ threads.append(
+ MiddlePipelineThread(
+ coro, queues[i - 1], queues[i], threads
+ )
+ )
# Last stage.
for coro in self.stages[-1]:
- threads.append(
- LastPipelineThread(coro, queues[-1], threads)
- )
+ threads.append(LastPipelineThread(coro, queues[-1], threads))
# Start threads.
for thread in threads:
@@ -472,21 +473,21 @@ def pull(self):
# Smoke test.
-if __name__ == '__main__':
+if __name__ == "__main__":
import time
# Test a normally-terminating pipeline both in sequence and
# in parallel.
def produce():
for i in range(5):
- print('generating %i' % i)
+ print("generating %i" % i)
time.sleep(1)
yield i
def work():
num = yield
while True:
- print('processing %i' % num)
+ print("processing %i" % num)
time.sleep(2)
num = yield num * 2
@@ -494,7 +495,7 @@ def consume():
while True:
num = yield
time.sleep(1)
- print('received %i' % num)
+ print("received %i" % num)
ts_start = time.time()
Pipeline([produce(), work(), consume()]).run_sequential()
@@ -503,22 +504,22 @@ def consume():
ts_par = time.time()
Pipeline([produce(), (work(), work()), consume()]).run_parallel()
ts_end = time.time()
- print('Sequential time:', ts_seq - ts_start)
- print('Parallel time:', ts_par - ts_seq)
- print('Multiply-parallel time:', ts_end - ts_par)
+ print("Sequential time:", ts_seq - ts_start)
+ print("Parallel time:", ts_par - ts_seq)
+ print("Multiply-parallel time:", ts_end - ts_par)
print()
# Test a pipeline that raises an exception.
def exc_produce():
for i in range(10):
- print('generating %i' % i)
+ print("generating %i" % i)
time.sleep(1)
yield i
def exc_work():
num = yield
while True:
- print('processing %i' % num)
+ print("processing %i" % num)
time.sleep(3)
if num == 3:
raise Exception()
@@ -527,6 +528,6 @@ def exc_work():
def exc_consume():
while True:
num = yield
- print('received %i' % num)
+ print("received %i" % num)
Pipeline([exc_produce(), exc_work(), exc_consume()]).run_parallel(1)
diff --git a/beets/vfs.py b/beets/vfs.py
index aef696508d..4a9681a926 100644
--- a/beets/vfs.py
+++ b/beets/vfs.py
@@ -17,9 +17,10 @@
"""
from collections import namedtuple
+
from beets import util
-Node = namedtuple('Node', ['files', 'dirs'])
+Node = namedtuple("Node", ["files", "dirs"])
def _insert(node, path, itemid):
diff --git a/beetsplug/__init__.py b/beetsplug/__init__.py
index da2484917a..763ff3a05a 100644
--- a/beetsplug/__init__.py
+++ b/beetsplug/__init__.py
@@ -17,4 +17,5 @@
# Make this a namespace package.
from pkgutil import extend_path
+
__path__ = extend_path(__path__, __name__)
diff --git a/beetsplug/absubmit.py b/beetsplug/absubmit.py
index a1ee4b82b8..03e7a9e972 100644
--- a/beetsplug/absubmit.py
+++ b/beetsplug/absubmit.py
@@ -22,16 +22,14 @@
import os
import subprocess
import tempfile
-
from distutils.spawn import find_executable
+
import requests
-from beets import plugins
-from beets import util
-from beets import ui
+from beets import plugins, ui, util
# We use this field to check whether AcousticBrainz info is present.
-PROBE_FIELD = 'mood_acoustic'
+PROBE_FIELD = "mood_acoustic"
class ABSubmitError(Exception):
@@ -47,42 +45,39 @@ def call(args):
return util.command_output(args).stdout
except subprocess.CalledProcessError as e:
raise ABSubmitError(
- '{} exited with status {}'.format(args[0], e.returncode)
+ "{} exited with status {}".format(args[0], e.returncode)
)
class AcousticBrainzSubmitPlugin(plugins.BeetsPlugin):
-
def __init__(self):
super().__init__()
self._log.warning("This plugin is deprecated.")
- self.config.add({
- 'extractor': '',
- 'force': False,
- 'pretend': False,
- 'base_url': ''
- })
+ self.config.add(
+ {"extractor": "", "force": False, "pretend": False, "base_url": ""}
+ )
- self.extractor = self.config['extractor'].as_str()
+ self.extractor = self.config["extractor"].as_str()
if self.extractor:
self.extractor = util.normpath(self.extractor)
# Explicit path to extractor
if not os.path.isfile(self.extractor):
raise ui.UserError(
- 'Extractor command does not exist: {0}.'.
- format(self.extractor)
+ "Extractor command does not exist: {0}.".format(
+ self.extractor
+ )
)
else:
# Implicit path to extractor, search for it in path
- self.extractor = 'streaming_extractor_music'
+ self.extractor = "streaming_extractor_music"
try:
call([self.extractor])
except OSError:
raise ui.UserError(
- 'No extractor command found: please install the extractor'
- ' binary from https://essentia.upf.edu/'
+ "No extractor command found: please install the extractor"
+ " binary from https://essentia.upf.edu/"
)
except ABSubmitError:
# Extractor found, will exit with an error if not called with
@@ -95,35 +90,42 @@ def __init__(self):
# Calculate extractor hash.
self.extractor_sha = hashlib.sha1()
- with open(self.extractor, 'rb') as extractor:
+ with open(self.extractor, "rb") as extractor:
self.extractor_sha.update(extractor.read())
self.extractor_sha = self.extractor_sha.hexdigest()
- self.url = ''
- base_url = self.config['base_url'].as_str()
+ self.url = ""
+ base_url = self.config["base_url"].as_str()
if base_url:
- if not base_url.startswith('http'):
- raise ui.UserError('AcousticBrainz server base URL must start '
- 'with an HTTP scheme')
- elif base_url[-1] != '/':
- base_url = base_url + '/'
- self.url = base_url + '{mbid}/low-level'
+ if not base_url.startswith("http"):
+ raise ui.UserError(
+ "AcousticBrainz server base URL must start "
+ "with an HTTP scheme"
+ )
+ elif base_url[-1] != "/":
+ base_url = base_url + "/"
+ self.url = base_url + "{mbid}/low-level"
def commands(self):
cmd = ui.Subcommand(
- 'absubmit',
- help='calculate and submit AcousticBrainz analysis'
+ "absubmit", help="calculate and submit AcousticBrainz analysis"
)
cmd.parser.add_option(
- '-f', '--force', dest='force_refetch',
- action='store_true', default=False,
- help='re-download data when already present'
+ "-f",
+ "--force",
+ dest="force_refetch",
+ action="store_true",
+ default=False,
+ help="re-download data when already present",
)
cmd.parser.add_option(
- '-p', '--pretend', dest='pretend_fetch',
- action='store_true', default=False,
- help='pretend to perform action, but show \
-only files which would be processed'
+ "-p",
+ "--pretend",
+ dest="pretend_fetch",
+ action="store_true",
+ default=False,
+ help="pretend to perform action, but show \
+only files which would be processed",
)
cmd.func = self.command
return [cmd]
@@ -131,9 +133,9 @@ def commands(self):
def command(self, lib, opts, args):
if not self.url:
raise ui.UserError(
- 'This plugin is deprecated since AcousticBrainz no longer '
- 'accepts new submissions. See the base_url configuration '
- 'option.'
+ "This plugin is deprecated since AcousticBrainz no longer "
+ "accepts new submissions. See the base_url configuration "
+ "option."
)
else:
# Get items from arguments
@@ -147,28 +149,29 @@ def analyze_submit(self, item):
self._submit_data(item, analysis)
def _get_analysis(self, item):
- mbid = item['mb_trackid']
+ mbid = item["mb_trackid"]
# Avoid re-analyzing files that already have AB data.
- if not self.opts.force_refetch and not self.config['force']:
+ if not self.opts.force_refetch and not self.config["force"]:
if item.get(PROBE_FIELD):
return None
# If file has no MBID, skip it.
if not mbid:
- self._log.info('Not analysing {}, missing '
- 'musicbrainz track id.', item)
+ self._log.info(
+ "Not analysing {}, missing " "musicbrainz track id.", item
+ )
return None
- if self.opts.pretend_fetch or self.config['pretend']:
- self._log.info('pretend action - extract item: {}', item)
+ if self.opts.pretend_fetch or self.config["pretend"]:
+ self._log.info("pretend action - extract item: {}", item)
return None
# Temporary file to save extractor output to, extractor only works
# if an output file is given. Here we use a temporary file to copy
# the data into a python object and then remove the file from the
# system.
- tmp_file, filename = tempfile.mkstemp(suffix='.json')
+ tmp_file, filename = tempfile.mkstemp(suffix=".json")
try:
# Close the file, so the extractor can overwrite it.
os.close(tmp_file)
@@ -176,15 +179,17 @@ def _get_analysis(self, item):
call([self.extractor, util.syspath(item.path), filename])
except ABSubmitError as e:
self._log.warning(
- 'Failed to analyse {item} for AcousticBrainz: {error}',
- item=item, error=e
+ "Failed to analyse {item} for AcousticBrainz: {error}",
+ item=item,
+ error=e,
)
return None
with open(filename) as tmp_file:
analysis = json.load(tmp_file)
# Add the hash to the output.
- analysis['metadata']['version']['essentia_build_sha'] = \
- self.extractor_sha
+ analysis["metadata"]["version"][
+ "essentia_build_sha"
+ ] = self.extractor_sha
return analysis
finally:
try:
@@ -195,20 +200,25 @@ def _get_analysis(self, item):
raise
def _submit_data(self, item, data):
- mbid = item['mb_trackid']
- headers = {'Content-Type': 'application/json'}
- response = requests.post(self.url.format(mbid=mbid),
- json=data, headers=headers)
+ mbid = item["mb_trackid"]
+ headers = {"Content-Type": "application/json"}
+ response = requests.post(
+ self.url.format(mbid=mbid), json=data, headers=headers
+ )
# Test that request was successful and raise an error on failure.
if response.status_code != 200:
try:
- message = response.json()['message']
+ message = response.json()["message"]
except (ValueError, KeyError) as e:
- message = f'unable to get error message: {e}'
+ message = f"unable to get error message: {e}"
self._log.error(
- 'Failed to submit AcousticBrainz analysis of {item}: '
- '{message}).', item=item, message=message
+ "Failed to submit AcousticBrainz analysis of {item}: "
+ "{message}).",
+ item=item,
+ message=message,
)
else:
- self._log.debug('Successfully submitted AcousticBrainz analysis '
- 'for {}.', item)
+ self._log.debug(
+ "Successfully submitted AcousticBrainz analysis " "for {}.",
+ item,
+ )
diff --git a/beetsplug/acousticbrainz.py b/beetsplug/acousticbrainz.py
index eeb22cfdb8..6aec70edeb 100644
--- a/beetsplug/acousticbrainz.py
+++ b/beetsplug/acousticbrainz.py
@@ -24,114 +24,61 @@
LEVELS = ["/low-level", "/high-level"]
ABSCHEME = {
- 'highlevel': {
- 'danceability': {
- 'all': {
- 'danceable': 'danceable'
- }
- },
- 'gender': {
- 'value': 'gender'
- },
- 'genre_rosamerica': {
- 'value': 'genre_rosamerica'
- },
- 'mood_acoustic': {
- 'all': {
- 'acoustic': 'mood_acoustic'
- }
- },
- 'mood_aggressive': {
- 'all': {
- 'aggressive': 'mood_aggressive'
- }
- },
- 'mood_electronic': {
- 'all': {
- 'electronic': 'mood_electronic'
- }
- },
- 'mood_happy': {
- 'all': {
- 'happy': 'mood_happy'
- }
- },
- 'mood_party': {
- 'all': {
- 'party': 'mood_party'
- }
- },
- 'mood_relaxed': {
- 'all': {
- 'relaxed': 'mood_relaxed'
- }
- },
- 'mood_sad': {
- 'all': {
- 'sad': 'mood_sad'
- }
- },
- 'moods_mirex': {
- 'value': 'moods_mirex'
- },
- 'ismir04_rhythm': {
- 'value': 'rhythm'
- },
- 'tonal_atonal': {
- 'all': {
- 'tonal': 'tonal'
- }
- },
- 'timbre': {
- 'value': 'timbre'
- },
- 'voice_instrumental': {
- 'value': 'voice_instrumental'
- },
+ "highlevel": {
+ "danceability": {"all": {"danceable": "danceable"}},
+ "gender": {"value": "gender"},
+ "genre_rosamerica": {"value": "genre_rosamerica"},
+ "mood_acoustic": {"all": {"acoustic": "mood_acoustic"}},
+ "mood_aggressive": {"all": {"aggressive": "mood_aggressive"}},
+ "mood_electronic": {"all": {"electronic": "mood_electronic"}},
+ "mood_happy": {"all": {"happy": "mood_happy"}},
+ "mood_party": {"all": {"party": "mood_party"}},
+ "mood_relaxed": {"all": {"relaxed": "mood_relaxed"}},
+ "mood_sad": {"all": {"sad": "mood_sad"}},
+ "moods_mirex": {"value": "moods_mirex"},
+ "ismir04_rhythm": {"value": "rhythm"},
+ "tonal_atonal": {"all": {"tonal": "tonal"}},
+ "timbre": {"value": "timbre"},
+ "voice_instrumental": {"value": "voice_instrumental"},
},
- 'lowlevel': {
- 'average_loudness': 'average_loudness'
+ "lowlevel": {"average_loudness": "average_loudness"},
+ "rhythm": {"bpm": "bpm"},
+ "tonal": {
+ "chords_changes_rate": "chords_changes_rate",
+ "chords_key": "chords_key",
+ "chords_number_rate": "chords_number_rate",
+ "chords_scale": "chords_scale",
+ "key_key": ("initial_key", 0),
+ "key_scale": ("initial_key", 1),
+ "key_strength": "key_strength",
},
- 'rhythm': {
- 'bpm': 'bpm'
- },
- 'tonal': {
- 'chords_changes_rate': 'chords_changes_rate',
- 'chords_key': 'chords_key',
- 'chords_number_rate': 'chords_number_rate',
- 'chords_scale': 'chords_scale',
- 'key_key': ('initial_key', 0),
- 'key_scale': ('initial_key', 1),
- 'key_strength': 'key_strength'
-
- }
}
class AcousticPlugin(plugins.BeetsPlugin):
item_types = {
- 'average_loudness': types.Float(6),
- 'chords_changes_rate': types.Float(6),
- 'chords_key': types.STRING,
- 'chords_number_rate': types.Float(6),
- 'chords_scale': types.STRING,
- 'danceable': types.Float(6),
- 'gender': types.STRING,
- 'genre_rosamerica': types.STRING,
- 'initial_key': types.STRING,
- 'key_strength': types.Float(6),
- 'mood_acoustic': types.Float(6),
- 'mood_aggressive': types.Float(6),
- 'mood_electronic': types.Float(6),
- 'mood_happy': types.Float(6),
- 'mood_party': types.Float(6),
- 'mood_relaxed': types.Float(6),
- 'mood_sad': types.Float(6),
- 'moods_mirex': types.STRING,
- 'rhythm': types.Float(6),
- 'timbre': types.STRING,
- 'tonal': types.Float(6),
- 'voice_instrumental': types.STRING,
+ "average_loudness": types.Float(6),
+ "chords_changes_rate": types.Float(6),
+ "chords_key": types.STRING,
+ "chords_number_rate": types.Float(6),
+ "chords_scale": types.STRING,
+ "danceable": types.Float(6),
+ "gender": types.STRING,
+ "genre_rosamerica": types.STRING,
+ "initial_key": types.STRING,
+ "key_strength": types.Float(6),
+ "mood_acoustic": types.Float(6),
+ "mood_aggressive": types.Float(6),
+ "mood_electronic": types.Float(6),
+ "mood_happy": types.Float(6),
+ "mood_party": types.Float(6),
+ "mood_relaxed": types.Float(6),
+ "mood_sad": types.Float(6),
+ "moods_mirex": types.STRING,
+ "rhythm": types.Float(6),
+ "timbre": types.STRING,
+ "tonal": types.Float(6),
+ "voice_instrumental": types.STRING,
}
def __init__(self):
@@ -139,110 +86,114 @@ def __init__(self):
self._log.warning("This plugin is deprecated.")
- self.config.add({
- 'auto': True,
- 'force': False,
- 'tags': [],
- 'base_url': ''
- })
+ self.config.add(
+ {"auto": True, "force": False, "tags": [], "base_url": ""}
+ )
- self.base_url = self.config['base_url'].as_str()
+ self.base_url = self.config["base_url"].as_str()
if self.base_url:
- if not self.base_url.startswith('http'):
- raise ui.UserError('AcousticBrainz server base URL must start '
- 'with an HTTP scheme')
- elif self.base_url[-1] != '/':
- self.base_url = self.base_url + '/'
+ if not self.base_url.startswith("http"):
+ raise ui.UserError(
+ "AcousticBrainz server base URL must start "
+ "with an HTTP scheme"
+ )
+ elif self.base_url[-1] != "/":
+ self.base_url = self.base_url + "/"
- if self.config['auto']:
- self.register_listener('import_task_files',
- self.import_task_files)
+ if self.config["auto"]:
+ self.register_listener("import_task_files", self.import_task_files)
def commands(self):
- cmd = ui.Subcommand('acousticbrainz',
- help="fetch metadata from AcousticBrainz")
+ cmd = ui.Subcommand(
+ "acousticbrainz", help="fetch metadata from AcousticBrainz"
+ )
cmd.parser.add_option(
- '-f', '--force', dest='force_refetch',
- action='store_true', default=False,
- help='re-download data when already present'
+ "-f",
+ "--force",
+ dest="force_refetch",
+ action="store_true",
+ default=False,
+ help="re-download data when already present",
)
def func(lib, opts, args):
items = lib.items(ui.decargs(args))
- self._fetch_info(items, ui.should_write(),
- opts.force_refetch or self.config['force'])
+ self._fetch_info(
+ items,
+ ui.should_write(),
+ opts.force_refetch or self.config["force"],
+ )
cmd.func = func
return [cmd]
def import_task_files(self, session, task):
- """Function is called upon beet import.
- """
+ """Function is called upon beet import."""
self._fetch_info(task.imported_items(), False, True)
def _get_data(self, mbid):
if not self.base_url:
raise ui.UserError(
- 'This plugin is deprecated since AcousticBrainz has shut '
- 'down. See the base_url configuration option.'
+ "This plugin is deprecated since AcousticBrainz has shut "
+ "down. See the base_url configuration option."
)
data = {}
for url in _generate_urls(self.base_url, mbid):
- self._log.debug('fetching URL: {}', url)
+ self._log.debug("fetching URL: {}", url)
try:
res = requests.get(url)
except requests.RequestException as exc:
- self._log.info('request error: {}', exc)
+ self._log.info("request error: {}", exc)
return {}
if res.status_code == 404:
- self._log.info('recording ID {} not found', mbid)
+ self._log.info("recording ID {} not found", mbid)
return {}
try:
data.update(res.json())
except ValueError:
- self._log.debug('Invalid Response: {}', res.text)
+ self._log.debug("Invalid Response: {}", res.text)
return {}
return data
def _fetch_info(self, items, write, force):
- """Fetch additional information from AcousticBrainz for the `item`s.
- """
- tags = self.config['tags'].as_str_seq()
+ """Fetch additional information from AcousticBrainz for the `item`s."""
+ tags = self.config["tags"].as_str_seq()
for item in items:
# If we're not forcing re-downloading for all tracks, check
# whether the data is already present. We use one
# representative field name to check for previously fetched
# data.
if not force:
- mood_str = item.get('mood_acoustic', '')
+ mood_str = item.get("mood_acoustic", "")
if mood_str:
- self._log.info('data already present for: {}', item)
+ self._log.info("data already present for: {}", item)
continue
# We can only fetch data for tracks with MBIDs.
if not item.mb_trackid:
continue
- self._log.info('getting data for: {}', item)
+ self._log.info("getting data for: {}", item)
data = self._get_data(item.mb_trackid)
if data:
for attr, val in self._map_data_to_scheme(data, ABSCHEME):
if not tags or attr in tags:
- self._log.debug('attribute {} of {} set to {}',
- attr,
- item,
- val)
+ self._log.debug(
+ "attribute {} of {} set to {}", attr, item, val
+ )
setattr(item, attr, val)
else:
- self._log.debug('skipping attribute {} of {}'
- ' (value {}) due to config',
- attr,
- item,
- val)
+ self._log.debug(
+ "skipping attribute {} of {}"
+ " (value {}) due to config",
+ attr,
+ item,
+ val,
+ )
item.store()
if write:
item.try_write()
@@ -302,14 +253,12 @@ def _map_data_to_scheme(self, data, scheme):
# The recursive traversal.
composites = defaultdict(list)
- yield from self._data_to_scheme_child(data,
- scheme,
- composites)
+ yield from self._data_to_scheme_child(data, scheme, composites)
# When composites has been populated, yield the composite attributes
# by joining their parts.
for composite_attr, value_parts in composites.items():
- yield composite_attr, ' '.join(value_parts)
+ yield composite_attr, " ".join(value_parts)
def _data_to_scheme_child(self, subdata, subscheme, composites):
"""The recursive business logic of :meth:`_map_data_to_scheme`:
@@ -324,27 +273,32 @@ def _data_to_scheme_child(self, subdata, subscheme, composites):
for k, v in subscheme.items():
if k in subdata:
if isinstance(v, dict):
- yield from self._data_to_scheme_child(subdata[k],
- v,
- composites)
+ yield from self._data_to_scheme_child(
+ subdata[k], v, composites
+ )
elif isinstance(v, tuple):
composite_attribute, part_number = v
attribute_parts = composites[composite_attribute]
# Parts are not guaranteed to be inserted in order
while len(attribute_parts) <= part_number:
- attribute_parts.append('')
+ attribute_parts.append("")
attribute_parts[part_number] = subdata[k]
else:
yield v, subdata[k]
else:
- self._log.warning('Acousticbrainz did not provide info '
- 'about {}', k)
- self._log.debug('Data {} could not be mapped to scheme {} '
- 'because key {} was not found', subdata, v, k)
+ self._log.warning(
+ "Acousticbrainz did not provide info " "about {}", k
+ )
+ self._log.debug(
+ "Data {} could not be mapped to scheme {} "
+ "because key {} was not found",
+ subdata,
+ v,
+ k,
+ )
def _generate_urls(base_url, mbid):
- """Generates AcousticBrainz end point urls for given `mbid`.
- """
+ """Generates AcousticBrainz end point urls for given `mbid`."""
for level in LEVELS:
yield base_url + mbid + level
diff --git a/beetsplug/advancedrewrite.py b/beetsplug/advancedrewrite.py
index 7844b83641..fbb455314d 100644
--- a/beetsplug/advancedrewrite.py
+++ b/beetsplug/advancedrewrite.py
@@ -14,13 +14,14 @@
"""Plugin to rewrite fields based on a given query."""
-from collections import defaultdict
import shlex
+from collections import defaultdict
import confuse
+
from beets import ui
from beets.dbcore import AndQuery, query_from_strings
-from beets.library import Item, Album
+from beets.library import Album, Item
from beets.plugins import BeetsPlugin
@@ -31,6 +32,7 @@ def rewriter(field, rules):
with the given rewriting rules.
``rules`` must be a list of (query, replacement) pairs.
"""
+
def fieldfunc(item):
value = item._values_fixed[field]
for query, replacement in rules:
@@ -50,29 +52,37 @@ def __init__(self):
"""Parse configuration and register template fields for rewriting."""
super().__init__()
- template = confuse.Sequence({
- 'match': str,
- 'field': str,
- 'replacement': str,
- })
+ template = confuse.Sequence(
+ {
+ "match": str,
+ "field": str,
+ "replacement": str,
+ }
+ )
# Gather all the rewrite rules for each field.
rules = defaultdict(list)
for rule in self.config.get(template):
- query = query_from_strings(AndQuery, Item, prefixes={},
- query_parts=shlex.split(rule['match']))
- fieldname = rule['field']
- replacement = rule['replacement']
+ query = query_from_strings(
+ AndQuery,
+ Item,
+ prefixes={},
+ query_parts=shlex.split(rule["match"]),
+ )
+ fieldname = rule["field"]
+ replacement = rule["replacement"]
if fieldname not in Item._fields:
raise ui.UserError(
- "invalid field name (%s) in rewriter" % fieldname)
- self._log.debug('adding template field {0} → {1}',
- fieldname, replacement)
+ "invalid field name (%s) in rewriter" % fieldname
+ )
+ self._log.debug(
+ "adding template field {0} → {1}", fieldname, replacement
+ )
rules[fieldname].append((query, replacement))
- if fieldname == 'artist':
+ if fieldname == "artist":
# Special case for the artist field: apply the same
# rewrite for "albumartist" as well.
- rules['albumartist'].append((query, replacement))
+ rules["albumartist"].append((query, replacement))
# Replace each template field with the new rewriter function.
for fieldname, fieldrules in rules.items():
diff --git a/beetsplug/albumtypes.py b/beetsplug/albumtypes.py
index b54e802e63..5200b5c6d1 100644
--- a/beetsplug/albumtypes.py
+++ b/beetsplug/albumtypes.py
@@ -26,40 +26,42 @@ class AlbumTypesPlugin(BeetsPlugin):
def __init__(self):
"""Init AlbumTypesPlugin."""
super().__init__()
- self.album_template_fields['atypes'] = self._atypes
- self.config.add({
- 'types': [
- ('ep', 'EP'),
- ('single', 'Single'),
- ('soundtrack', 'OST'),
- ('live', 'Live'),
- ('compilation', 'Anthology'),
- ('remix', 'Remix')
- ],
- 'ignore_va': ['compilation'],
- 'bracket': '[]'
- })
+ self.album_template_fields["atypes"] = self._atypes
+ self.config.add(
+ {
+ "types": [
+ ("ep", "EP"),
+ ("single", "Single"),
+ ("soundtrack", "OST"),
+ ("live", "Live"),
+ ("compilation", "Anthology"),
+ ("remix", "Remix"),
+ ],
+ "ignore_va": ["compilation"],
+ "bracket": "[]",
+ }
+ )
def _atypes(self, item: Album):
"""Returns a formatted string based on album's types."""
- types = self.config['types'].as_pairs()
- ignore_va = self.config['ignore_va'].as_str_seq()
- bracket = self.config['bracket'].as_str()
+ types = self.config["types"].as_pairs()
+ ignore_va = self.config["ignore_va"].as_str_seq()
+ bracket = self.config["bracket"].as_str()
# Assign a left and right bracket or leave blank if argument is empty.
if len(bracket) == 2:
bracket_l = bracket[0]
bracket_r = bracket[1]
else:
- bracket_l = ''
- bracket_r = ''
+ bracket_l = ""
+ bracket_r = ""
- res = ''
+ res = ""
albumtypes = item.albumtypes
is_va = item.mb_albumartistid == VARIOUS_ARTISTS_ID
for type in types:
if type[0] in albumtypes and type[1]:
if not is_va or (type[0] not in ignore_va and is_va):
- res += f'{bracket_l}{type[1]}{bracket_r}'
+ res += f"{bracket_l}{type[1]}{bracket_r}"
return res
diff --git a/beetsplug/aura.py b/beetsplug/aura.py
index f4ae5527a0..5810c3a77d 100644
--- a/beetsplug/aura.py
+++ b/beetsplug/aura.py
@@ -15,35 +15,34 @@
"""An AURA server using Flask."""
-from mimetypes import guess_type
-import re
import os.path
-from os.path import isfile, getsize
-
-from beets.plugins import BeetsPlugin
-from beets.ui import Subcommand, _open_library
-from beets import config
-from beets.util import py3_path
-from beets.library import Item, Album
-from beets.dbcore.query import (
- MatchQuery,
- NotQuery,
- RegexpQuery,
- AndQuery,
- FixedFieldSort,
- SlowFieldSort,
- MultipleSort,
-)
+import re
+from mimetypes import guess_type
+from os.path import getsize, isfile
from flask import (
Blueprint,
Flask,
current_app,
- send_file,
make_response,
request,
+ send_file,
)
+from beets import config
+from beets.dbcore.query import (
+ AndQuery,
+ FixedFieldSort,
+ MatchQuery,
+ MultipleSort,
+ NotQuery,
+ RegexpQuery,
+ SlowFieldSort,
+)
+from beets.library import Album, Item
+from beets.plugins import BeetsPlugin
+from beets.ui import Subcommand, _open_library
+from beets.util import py3_path
# Constants
@@ -608,7 +607,7 @@ def safe_filename(fn):
return False
# In single names, rule out Unix directory traversal names.
- if fn in ('.', '..'):
+ if fn in (".", ".."):
return False
return True
diff --git a/beetsplug/autobpm.py b/beetsplug/autobpm.py
index 80e7910a81..aace0c593c 100644
--- a/beetsplug/autobpm.py
+++ b/beetsplug/autobpm.py
@@ -15,65 +15,77 @@
"""
-from beets import ui
-from beets import util
-from beets.plugins import BeetsPlugin
-
-from librosa import load, beat
+from librosa import beat, load
from soundfile import LibsndfileError
+from beets import ui, util
+from beets.plugins import BeetsPlugin
+
class AutoBPMPlugin(BeetsPlugin):
-
def __init__(self):
super().__init__()
- self.config.add({
- 'auto': True,
- 'overwrite': False,
- })
-
- if self.config['auto'].get(bool):
+ self.config.add(
+ {
+ "auto": True,
+ "overwrite": False,
+ }
+ )
+
+ if self.config["auto"].get(bool):
self.import_stages = [self.imported]
def commands(self):
- cmd = ui.Subcommand('autobpm',
- help='detect and add bpm from audio using Librosa')
+ cmd = ui.Subcommand(
+ "autobpm", help="detect and add bpm from audio using Librosa"
+ )
cmd.func = self.command
return [cmd]
def command(self, lib, opts, args):
- self.calculate_bpm(lib.items(ui.decargs(args)),
- write=ui.should_write())
+ self.calculate_bpm(lib.items(ui.decargs(args)), write=ui.should_write())
def imported(self, session, task):
self.calculate_bpm(task.imported_items())
def calculate_bpm(self, items, write=False):
- overwrite = self.config['overwrite'].get(bool)
+ overwrite = self.config["overwrite"].get(bool)
for item in items:
- if item['bpm']:
- self._log.info('found bpm {0} for {1}',
- item['bpm'], util.displayable_path(item.path))
+ if item["bpm"]:
+ self._log.info(
+ "found bpm {0} for {1}",
+ item["bpm"],
+ util.displayable_path(item.path),
+ )
if not overwrite:
continue
try:
- y, sr = load(util.syspath(item.path), res_type='kaiser_fast')
+ y, sr = load(util.syspath(item.path), res_type="kaiser_fast")
except LibsndfileError as exc:
- self._log.error('LibsndfileError: failed to load {0} {1}',
- util.displayable_path(item.path), exc)
+ self._log.error(
+ "LibsndfileError: failed to load {0} {1}",
+ util.displayable_path(item.path),
+ exc,
+ )
continue
except ValueError as exc:
- self._log.error('ValueError: failed to load {0} {1}',
- util.displayable_path(item.path), exc)
+ self._log.error(
+ "ValueError: failed to load {0} {1}",
+ util.displayable_path(item.path),
+ exc,
+ )
continue
tempo, _ = beat.beat_track(y=y, sr=sr)
bpm = round(tempo)
- item['bpm'] = bpm
- self._log.info('added computed bpm {0} for {1}',
- bpm, util.displayable_path(item.path))
+ item["bpm"] = bpm
+ self._log.info(
+ "added computed bpm {0} for {1}",
+ bpm,
+ util.displayable_path(item.path),
+ )
if write:
item.try_write()
diff --git a/beetsplug/badfiles.py b/beetsplug/badfiles.py
index ec465895b5..056b65346c 100644
--- a/beetsplug/badfiles.py
+++ b/beetsplug/badfiles.py
@@ -16,18 +16,18 @@
"""
-from subprocess import check_output, CalledProcessError, list2cmdline, STDOUT
-
-import shlex
-import os
import errno
+import os
+import shlex
import sys
+from subprocess import STDOUT, CalledProcessError, check_output, list2cmdline
+
import confuse
+
+from beets import importer, ui
from beets.plugins import BeetsPlugin
from beets.ui import Subcommand
from beets.util import displayable_path, par_map
-from beets import ui
-from beets import importer
class CheckerCommandException(Exception):
@@ -52,14 +52,15 @@ def __init__(self):
super().__init__()
self.verbose = False
- self.register_listener('import_task_start',
- self.on_import_task_start)
- self.register_listener('import_task_before_choice',
- self.on_import_task_before_choice)
+ self.register_listener("import_task_start", self.on_import_task_start)
+ self.register_listener(
+ "import_task_before_choice", self.on_import_task_before_choice
+ )
def run_command(self, cmd):
- self._log.debug("running command: {}",
- displayable_path(list2cmdline(cmd)))
+ self._log.debug(
+ "running command: {}", displayable_path(list2cmdline(cmd))
+ )
try:
output = check_output(cmd, stderr=STDOUT)
errors = 0
@@ -70,7 +71,7 @@ def run_command(self, cmd):
status = e.returncode
except OSError as e:
raise CheckerCommandException(cmd, e)
- output = output.decode(sys.getdefaultencoding(), 'replace')
+ output = output.decode(sys.getdefaultencoding(), "replace")
return status, errors, [line for line in output.split("\n") if line]
def check_mp3val(self, path):
@@ -88,12 +89,13 @@ def checker(path):
cmd = shlex.split(command)
cmd.append(path)
return self.run_command(cmd)
+
return checker
def get_checker(self, ext):
ext = ext.lower()
try:
- command = self.config['commands'].get(dict).get(ext)
+ command = self.config["commands"].get(dict).get(ext)
except confuse.NotFoundError:
command = None
if command:
@@ -109,15 +111,17 @@ def check_item(self, item):
dpath = displayable_path(item.path)
self._log.debug("checking path: {}", dpath)
if not os.path.exists(item.path):
- ui.print_("{}: file does not exist".format(
- ui.colorize('text_error', dpath)))
+ ui.print_(
+ "{}: file does not exist".format(
+ ui.colorize("text_error", dpath)
+ )
+ )
# Run the checker against the file if one is found
- ext = os.path.splitext(item.path)[1][1:].decode('utf8', 'ignore')
+ ext = os.path.splitext(item.path)[1][1:].decode("utf8", "ignore")
checker = self.get_checker(ext)
if not checker:
- self._log.error("no checker specified in the config for {}",
- ext)
+ self._log.error("no checker specified in the config for {}", ext)
return []
path = item.path
if not isinstance(path, str):
@@ -129,7 +133,7 @@ def check_item(self, item):
self._log.error(
"command not found: {} when validating file: {}",
e.checker,
- e.path
+ e.path,
)
else:
self._log.error("error invoking {}: {}", e.checker, e.msg)
@@ -139,25 +143,30 @@ def check_item(self, item):
if status > 0:
error_lines.append(
- "{}: checker exited with status {}"
- .format(ui.colorize('text_error', dpath), status))
+ "{}: checker exited with status {}".format(
+ ui.colorize("text_error", dpath), status
+ )
+ )
for line in output:
error_lines.append(f" {line}")
elif errors > 0:
error_lines.append(
- "{}: checker found {} errors or warnings"
- .format(ui.colorize('text_warning', dpath), errors))
+ "{}: checker found {} errors or warnings".format(
+ ui.colorize("text_warning", dpath), errors
+ )
+ )
for line in output:
error_lines.append(f" {line}")
elif self.verbose:
error_lines.append(
- "{}: ok".format(ui.colorize('text_success', dpath)))
+ "{}: ok".format(ui.colorize("text_success", dpath))
+ )
return error_lines
def on_import_task_start(self, task, session):
- if not self.config['check_on_import'].get(False):
+ if not self.config["check_on_import"].get(False):
return
checks_failed = []
@@ -171,26 +180,29 @@ def on_import_task_start(self, task, session):
task._badfiles_checks_failed = checks_failed
def on_import_task_before_choice(self, task, session):
- if hasattr(task, '_badfiles_checks_failed'):
- ui.print_('{} one or more files failed checks:'
- .format(ui.colorize('text_warning', 'BAD')))
+ if hasattr(task, "_badfiles_checks_failed"):
+ ui.print_(
+ "{} one or more files failed checks:".format(
+ ui.colorize("text_warning", "BAD")
+ )
+ )
for error in task._badfiles_checks_failed:
for error_line in error:
ui.print_(error_line)
ui.print_()
- ui.print_('What would you like to do?')
+ ui.print_("What would you like to do?")
- sel = ui.input_options(['aBort', 'skip', 'continue'])
+ sel = ui.input_options(["aBort", "skip", "continue"])
- if sel == 's':
+ if sel == "s":
return importer.action.SKIP
- elif sel == 'c':
+ elif sel == "c":
return None
- elif sel == 'b':
+ elif sel == "b":
raise importer.ImportAbort()
else:
- raise Exception(f'Unexpected selection: {sel}')
+ raise Exception(f"Unexpected selection: {sel}")
def command(self, lib, opts, args):
# Get items from arguments
@@ -204,12 +216,16 @@ def check_and_print(item):
par_map(check_and_print, items)
def commands(self):
- bad_command = Subcommand('bad',
- help='check for corrupt or missing files')
+ bad_command = Subcommand(
+ "bad", help="check for corrupt or missing files"
+ )
bad_command.parser.add_option(
- '-v', '--verbose',
- action='store_true', default=False, dest='verbose',
- help='view results for both the bad and uncorrupted files'
+ "-v",
+ "--verbose",
+ action="store_true",
+ default=False,
+ dest="verbose",
+ help="view results for both the bad and uncorrupted files",
)
bad_command.func = self.command
return [bad_command]
diff --git a/beetsplug/bareasc.py b/beetsplug/bareasc.py
index 3343786f9c..dff0b2e932 100644
--- a/beetsplug/bareasc.py
+++ b/beetsplug/bareasc.py
@@ -19,15 +19,17 @@
"""Provides a bare-ASCII matching query."""
+from unidecode import unidecode
+
from beets import ui
-from beets.ui import print_, decargs
-from beets.plugins import BeetsPlugin
from beets.dbcore.query import StringFieldQuery
-from unidecode import unidecode
+from beets.plugins import BeetsPlugin
+from beets.ui import decargs, print_
class BareascQuery(StringFieldQuery):
"""Compare items using bare ASCII, without accents etc."""
+
@classmethod
def string_match(cls, pattern, val):
"""Convert both pattern and string to plain ASCII before matching.
@@ -53,24 +55,29 @@ def col_clause(self):
class BareascPlugin(BeetsPlugin):
"""Plugin to provide bare-ASCII option for beets matching."""
+
def __init__(self):
"""Default prefix for selecting bare-ASCII matching is #."""
super().__init__()
- self.config.add({
- 'prefix': '#',
- })
+ self.config.add(
+ {
+ "prefix": "#",
+ }
+ )
def queries(self):
"""Register bare-ASCII matching."""
- prefix = self.config['prefix'].as_str()
+ prefix = self.config["prefix"].as_str()
return {prefix: BareascQuery}
def commands(self):
"""Add bareasc command as unidecode version of 'list'."""
- cmd = ui.Subcommand('bareasc',
- help='unidecode version of beet list command')
- cmd.parser.usage += "\n" \
- 'Example: %prog -f \'$album: $title\' artist:beatles'
+ cmd = ui.Subcommand(
+ "bareasc", help="unidecode version of beet list command"
+ )
+ cmd.parser.usage += (
+ "\n" "Example: %prog -f '$album: $title' artist:beatles"
+ )
cmd.parser.add_all_common_options()
cmd.func = self.unidecode_list
return [cmd]
diff --git a/beetsplug/beatport.py b/beetsplug/beatport.py
index bede8071db..6108b03996 100644
--- a/beetsplug/beatport.py
+++ b/beetsplug/beatport.py
@@ -19,20 +19,22 @@
import re
from datetime import datetime, timedelta
+import confuse
from requests_oauthlib import OAuth1Session
-from requests_oauthlib.oauth1_session import (TokenRequestDenied, TokenMissing,
- VerifierMissing)
+from requests_oauthlib.oauth1_session import (
+ TokenMissing,
+ TokenRequestDenied,
+ VerifierMissing,
+)
import beets
import beets.ui
from beets.autotag.hooks import AlbumInfo, TrackInfo
from beets.plugins import BeetsPlugin, MetadataSourcePlugin, get_distance
-import confuse
from beets.util.id_extractors import beatport_id_regex
-
AUTH_ERRORS = (TokenRequestDenied, TokenMissing, VerifierMissing)
-USER_AGENT = f'beets/{beets.__version__} +https://beets.io/'
+USER_AGENT = f"beets/{beets.__version__} +https://beets.io/"
class BeatportAPIError(Exception):
@@ -41,24 +43,23 @@ class BeatportAPIError(Exception):
class BeatportObject:
def __init__(self, data):
- self.beatport_id = data['id']
- self.name = str(data['name'])
- if 'releaseDate' in data:
- self.release_date = datetime.strptime(data['releaseDate'],
- '%Y-%m-%d')
- if 'artists' in data:
- self.artists = [(x['id'], str(x['name']))
- for x in data['artists']]
- if 'genres' in data:
- self.genres = [str(x['name'])
- for x in data['genres']]
+ self.beatport_id = data["id"]
+ self.name = str(data["name"])
+ if "releaseDate" in data:
+ self.release_date = datetime.strptime(
+ data["releaseDate"], "%Y-%m-%d"
+ )
+ if "artists" in data:
+ self.artists = [(x["id"], str(x["name"])) for x in data["artists"]]
+ if "genres" in data:
+ self.genres = [str(x["name"]) for x in data["genres"]]
class BeatportClient:
- _api_base = 'https://oauth-api.beatport.com'
+ _api_base = "https://oauth-api.beatport.com"
def __init__(self, c_key, c_secret, auth_key=None, auth_secret=None):
- """ Initiate the client with OAuth information.
+ """Initiate the client with OAuth information.
For the initial authentication with the backend `auth_key` and
`auth_secret` can be `None`. Use `get_authorize_url` and
@@ -70,14 +71,16 @@ def __init__(self, c_key, c_secret, auth_key=None, auth_secret=None):
:param auth_secret: OAuth1 resource owner secret
"""
self.api = OAuth1Session(
- client_key=c_key, client_secret=c_secret,
+ client_key=c_key,
+ client_secret=c_secret,
resource_owner_key=auth_key,
resource_owner_secret=auth_secret,
- callback_uri='oob')
- self.api.headers = {'User-Agent': USER_AGENT}
+ callback_uri="oob",
+ )
+ self.api.headers = {"User-Agent": USER_AGENT}
def get_authorize_url(self):
- """ Generate the URL for the user to authorize the application.
+ """Generate the URL for the user to authorize the application.
Retrieves a request token from the Beatport API and returns the
corresponding authorization URL on their end that the user has
@@ -92,12 +95,14 @@ def get_authorize_url(self):
:rtype: unicode
"""
self.api.fetch_request_token(
- self._make_url('/identity/1/oauth/request-token'))
+ self._make_url("/identity/1/oauth/request-token")
+ )
return self.api.authorization_url(
- self._make_url('/identity/1/oauth/authorize'))
+ self._make_url("/identity/1/oauth/authorize")
+ )
def get_access_token(self, auth_data):
- """ Obtain the final access token and secret for the API.
+ """Obtain the final access token and secret for the API.
:param auth_data: URL-encoded authorization data as displayed at
the authorization url (obtained via
@@ -107,13 +112,15 @@ def get_access_token(self, auth_data):
:rtype: (unicode, unicode) tuple
"""
self.api.parse_authorization_response(
- "https://beets.io/auth?" + auth_data)
+ "https://beets.io/auth?" + auth_data
+ )
access_data = self.api.fetch_access_token(
- self._make_url('/identity/1/oauth/access-token'))
- return access_data['oauth_token'], access_data['oauth_token_secret']
+ self._make_url("/identity/1/oauth/access-token")
+ )
+ return access_data["oauth_token"], access_data["oauth_token_secret"]
- def search(self, query, release_type='release', details=True):
- """ Perform a search of the Beatport catalogue.
+ def search(self, query, release_type="release", details=True):
+ """Perform a search of the Beatport catalogue.
:param query: Query string
:param release_type: Type of releases to search for, can be
@@ -127,27 +134,30 @@ def search(self, query, release_type='release', details=True):
py:class:`BeatportRelease` or
:py:class:`BeatportTrack`
"""
- response = self._get('catalog/3/search',
- query=query, perPage=5,
- facets=[f'fieldType:{release_type}'])
+ response = self._get(
+ "catalog/3/search",
+ query=query,
+ perPage=5,
+ facets=[f"fieldType:{release_type}"],
+ )
for item in response:
- if release_type == 'release':
+ if release_type == "release":
if details:
- release = self.get_release(item['id'])
+ release = self.get_release(item["id"])
else:
release = BeatportRelease(item)
yield release
- elif release_type == 'track':
+ elif release_type == "track":
yield BeatportTrack(item)
def get_release(self, beatport_id):
- """ Get information about a single release.
+ """Get information about a single release.
:param beatport_id: Beatport ID of the release
:returns: The matching release
:rtype: :py:class:`BeatportRelease`
"""
- response = self._get('/catalog/3/releases', id=beatport_id)
+ response = self._get("/catalog/3/releases", id=beatport_id)
if response:
release = BeatportRelease(response[0])
release.tracks = self.get_release_tracks(beatport_id)
@@ -155,34 +165,35 @@ def get_release(self, beatport_id):
return None
def get_release_tracks(self, beatport_id):
- """ Get all tracks for a given release.
+ """Get all tracks for a given release.
:param beatport_id: Beatport ID of the release
:returns: Tracks in the matching release
:rtype: list of :py:class:`BeatportTrack`
"""
- response = self._get('/catalog/3/tracks', releaseId=beatport_id,
- perPage=100)
+ response = self._get(
+ "/catalog/3/tracks", releaseId=beatport_id, perPage=100
+ )
return [BeatportTrack(t) for t in response]
def get_track(self, beatport_id):
- """ Get information about a single track.
+ """Get information about a single track.
:param beatport_id: Beatport ID of the track
:returns: The matching track
:rtype: :py:class:`BeatportTrack`
"""
- response = self._get('/catalog/3/tracks', id=beatport_id)
+ response = self._get("/catalog/3/tracks", id=beatport_id)
return BeatportTrack(response[0])
def _make_url(self, endpoint):
- """ Get complete URL for a given API endpoint. """
- if not endpoint.startswith('/'):
- endpoint = '/' + endpoint
+ """Get complete URL for a given API endpoint."""
+ if not endpoint.startswith("/"):
+ endpoint = "/" + endpoint
return self._api_base + endpoint
def _get(self, endpoint, **kwargs):
- """ Perform a GET request on a given API endpoint.
+ """Perform a GET request on a given API endpoint.
Automatically extracts result data from the response and converts HTTP
exceptions into :py:class:`BeatportAPIError` objects.
@@ -190,13 +201,16 @@ def _get(self, endpoint, **kwargs):
try:
response = self.api.get(self._make_url(endpoint), params=kwargs)
except Exception as e:
- raise BeatportAPIError("Error connecting to Beatport API: {}"
- .format(e))
+ raise BeatportAPIError(
+ "Error connecting to Beatport API: {}".format(e)
+ )
if not response:
raise BeatportAPIError(
- "Error {0.status_code} for '{0.request.path_url}"
- .format(response))
- return response.json()['results']
+ "Error {0.status_code} for '{0.request.path_url}".format(
+ response
+ )
+ )
+ return response.json()["results"]
class BeatportRelease(BeatportObject):
@@ -212,80 +226,83 @@ def __str__(self):
)
def __repr__(self):
- return str(self).encode('utf-8')
+ return str(self).encode("utf-8")
def __init__(self, data):
BeatportObject.__init__(self, data)
- if 'catalogNumber' in data:
- self.catalog_number = data['catalogNumber']
- if 'label' in data:
- self.label_name = data['label']['name']
- if 'category' in data:
- self.category = data['category']
- if 'slug' in data:
+ if "catalogNumber" in data:
+ self.catalog_number = data["catalogNumber"]
+ if "label" in data:
+ self.label_name = data["label"]["name"]
+ if "category" in data:
+ self.category = data["category"]
+ if "slug" in data:
self.url = "https://beatport.com/release/{}/{}".format(
- data['slug'], data['id'])
- self.genre = data.get('genre')
+ data["slug"], data["id"]
+ )
+ self.genre = data.get("genre")
class BeatportTrack(BeatportObject):
def __str__(self):
artist_str = ", ".join(x[1] for x in self.artists)
- return (""
- .format(artist_str, self.name, self.mix_name))
+ return "".format(
+ artist_str, self.name, self.mix_name
+ )
def __repr__(self):
- return str(self).encode('utf-8')
+ return str(self).encode("utf-8")
def __init__(self, data):
BeatportObject.__init__(self, data)
- if 'title' in data:
- self.title = str(data['title'])
- if 'mixName' in data:
- self.mix_name = str(data['mixName'])
- self.length = timedelta(milliseconds=data.get('lengthMs', 0) or 0)
+ if "title" in data:
+ self.title = str(data["title"])
+ if "mixName" in data:
+ self.mix_name = str(data["mixName"])
+ self.length = timedelta(milliseconds=data.get("lengthMs", 0) or 0)
if not self.length:
try:
- min, sec = data.get('length', '0:0').split(':')
+ min, sec = data.get("length", "0:0").split(":")
self.length = timedelta(minutes=int(min), seconds=int(sec))
except ValueError:
pass
- if 'slug' in data:
- self.url = "https://beatport.com/track/{}/{}" \
- .format(data['slug'], data['id'])
- self.track_number = data.get('trackNumber')
- self.bpm = data.get('bpm')
- self.initial_key = str(
- (data.get('key') or {}).get('shortName')
- )
+ if "slug" in data:
+ self.url = "https://beatport.com/track/{}/{}".format(
+ data["slug"], data["id"]
+ )
+ self.track_number = data.get("trackNumber")
+ self.bpm = data.get("bpm")
+ self.initial_key = str((data.get("key") or {}).get("shortName"))
# Use 'subgenre' and if not present, 'genre' as a fallback.
- if data.get('subGenres'):
- self.genre = str(data['subGenres'][0].get('name'))
- elif data.get('genres'):
- self.genre = str(data['genres'][0].get('name'))
+ if data.get("subGenres"):
+ self.genre = str(data["subGenres"][0].get("name"))
+ elif data.get("genres"):
+ self.genre = str(data["genres"][0].get("name"))
class BeatportPlugin(BeetsPlugin):
- data_source = 'Beatport'
+ data_source = "Beatport"
id_regex = beatport_id_regex
def __init__(self):
super().__init__()
- self.config.add({
- 'apikey': '57713c3906af6f5def151b33601389176b37b429',
- 'apisecret': 'b3fe08c93c80aefd749fe871a16cd2bb32e2b954',
- 'tokenfile': 'beatport_token.json',
- 'source_weight': 0.5,
- })
- self.config['apikey'].redact = True
- self.config['apisecret'].redact = True
+ self.config.add(
+ {
+ "apikey": "57713c3906af6f5def151b33601389176b37b429",
+ "apisecret": "b3fe08c93c80aefd749fe871a16cd2bb32e2b954",
+ "tokenfile": "beatport_token.json",
+ "source_weight": 0.5,
+ }
+ )
+ self.config["apikey"].redact = True
+ self.config["apisecret"].redact = True
self.client = None
- self.register_listener('import_begin', self.setup)
+ self.register_listener("import_begin", self.setup)
def setup(self, session=None):
- c_key = self.config['apikey'].as_str()
- c_secret = self.config['apisecret'].as_str()
+ c_key = self.config["apikey"].as_str()
+ c_secret = self.config["apisecret"].as_str()
# Get the OAuth token from a file or log in.
try:
@@ -295,8 +312,8 @@ def setup(self, session=None):
# No token yet. Generate one.
token, secret = self.authenticate(c_key, c_secret)
else:
- token = tokendata['token']
- secret = tokendata['secret']
+ token = tokendata["token"]
+ secret = tokendata["secret"]
self.client = BeatportClient(c_key, c_secret, token, secret)
@@ -306,8 +323,8 @@ def authenticate(self, c_key, c_secret):
try:
url = auth_client.get_authorize_url()
except AUTH_ERRORS as e:
- self._log.debug('authentication error: {0}', e)
- raise beets.ui.UserError('communication with Beatport failed')
+ self._log.debug("authentication error: {0}", e)
+ raise beets.ui.UserError("communication with Beatport failed")
beets.ui.print_("To authenticate with Beatport, visit:")
beets.ui.print_(url)
@@ -317,29 +334,26 @@ def authenticate(self, c_key, c_secret):
try:
token, secret = auth_client.get_access_token(data)
except AUTH_ERRORS as e:
- self._log.debug('authentication error: {0}', e)
- raise beets.ui.UserError('Beatport token request failed')
+ self._log.debug("authentication error: {0}", e)
+ raise beets.ui.UserError("Beatport token request failed")
# Save the token for later use.
- self._log.debug('Beatport token {0}, secret {1}', token, secret)
- with open(self._tokenfile(), 'w') as f:
- json.dump({'token': token, 'secret': secret}, f)
+ self._log.debug("Beatport token {0}, secret {1}", token, secret)
+ with open(self._tokenfile(), "w") as f:
+ json.dump({"token": token, "secret": secret}, f)
return token, secret
def _tokenfile(self):
- """Get the path to the JSON file for storing the OAuth token.
- """
- return self.config['tokenfile'].get(confuse.Filename(in_app_dir=True))
+ """Get the path to the JSON file for storing the OAuth token."""
+ return self.config["tokenfile"].get(confuse.Filename(in_app_dir=True))
def album_distance(self, items, album_info, mapping):
"""Returns the Beatport source weight and the maximum source weight
for albums.
"""
return get_distance(
- data_source=self.data_source,
- info=album_info,
- config=self.config
+ data_source=self.data_source, info=album_info, config=self.config
)
def track_distance(self, item, track_info):
@@ -347,9 +361,7 @@ def track_distance(self, item, track_info):
for individual tracks.
"""
return get_distance(
- data_source=self.data_source,
- info=track_info,
- config=self.config
+ data_source=self.data_source, info=track_info, config=self.config
)
def candidates(self, items, artist, release, va_likely, extra_tags=None):
@@ -359,33 +371,33 @@ def candidates(self, items, artist, release, va_likely, extra_tags=None):
if va_likely:
query = release
else:
- query = f'{artist} {release}'
+ query = f"{artist} {release}"
try:
return self._get_releases(query)
except BeatportAPIError as e:
- self._log.debug('API Error: {0} (query: {1})', e, query)
+ self._log.debug("API Error: {0} (query: {1})", e, query)
return []
def item_candidates(self, item, artist, title):
"""Returns a list of TrackInfo objects for beatport search results
matching title and artist.
"""
- query = f'{artist} {title}'
+ query = f"{artist} {title}"
try:
return self._get_tracks(query)
except BeatportAPIError as e:
- self._log.debug('API Error: {0} (query: {1})', e, query)
+ self._log.debug("API Error: {0} (query: {1})", e, query)
return []
def album_for_id(self, release_id):
"""Fetches a release by its Beatport ID and returns an AlbumInfo object
or None if the query is not a valid ID or release is not found.
"""
- self._log.debug('Searching for release {0}', release_id)
+ self._log.debug("Searching for release {0}", release_id)
- release_id = self._get_id('album', release_id, self.id_regex)
+ release_id = self._get_id("album", release_id, self.id_regex)
if release_id is None:
- self._log.debug('Not a valid Beatport release ID.')
+ self._log.debug("Not a valid Beatport release ID.")
return None
release = self.client.get_release(release_id)
@@ -397,10 +409,10 @@ def track_for_id(self, track_id):
"""Fetches a track by its Beatport ID and returns a TrackInfo object
or None if the track is not a valid Beatport ID or track is not found.
"""
- self._log.debug('Searching for track {0}', track_id)
- match = re.search(r'(^|beatport\.com/track/.+/)(\d+)$', track_id)
+ self._log.debug("Searching for track {0}", track_id)
+ match = re.search(r"(^|beatport\.com/track/.+/)(\d+)$", track_id)
if not match:
- self._log.debug('Not a valid Beatport track ID.')
+ self._log.debug("Not a valid Beatport track ID.")
return None
bp_track = self.client.get_track(match.group(2))
if bp_track is not None:
@@ -408,56 +420,67 @@ def track_for_id(self, track_id):
return None
def _get_releases(self, query):
- """Returns a list of AlbumInfo objects for a beatport search query.
- """
+ """Returns a list of AlbumInfo objects for a beatport search query."""
# Strip non-word characters from query. Things like "!" and "-" can
# cause a query to return no results, even if they match the artist or
# album title. Use `re.UNICODE` flag to avoid stripping non-english
# word characters.
- query = re.sub(r'\W+', ' ', query, flags=re.UNICODE)
+ query = re.sub(r"\W+", " ", query, flags=re.UNICODE)
# Strip medium information from query, Things like "CD1" and "disk 1"
# can also negate an otherwise positive result.
- query = re.sub(r'\b(CD|disc)\s*\d+', '', query, flags=re.I)
- albums = [self._get_album_info(x)
- for x in self.client.search(query)]
+ query = re.sub(r"\b(CD|disc)\s*\d+", "", query, flags=re.I)
+ albums = [self._get_album_info(x) for x in self.client.search(query)]
return albums
def _get_album_info(self, release):
- """Returns an AlbumInfo object for a Beatport Release object.
- """
+ """Returns an AlbumInfo object for a Beatport Release object."""
va = len(release.artists) > 3
artist, artist_id = self._get_artist(release.artists)
if va:
artist = "Various Artists"
tracks = [self._get_track_info(x) for x in release.tracks]
- return AlbumInfo(album=release.name, album_id=release.beatport_id,
- beatport_album_id=release.beatport_id,
- artist=artist, artist_id=artist_id, tracks=tracks,
- albumtype=release.category, va=va,
- year=release.release_date.year,
- month=release.release_date.month,
- day=release.release_date.day,
- label=release.label_name,
- catalognum=release.catalog_number, media='Digital',
- data_source=self.data_source, data_url=release.url,
- genre=release.genre)
+ return AlbumInfo(
+ album=release.name,
+ album_id=release.beatport_id,
+ beatport_album_id=release.beatport_id,
+ artist=artist,
+ artist_id=artist_id,
+ tracks=tracks,
+ albumtype=release.category,
+ va=va,
+ year=release.release_date.year,
+ month=release.release_date.month,
+ day=release.release_date.day,
+ label=release.label_name,
+ catalognum=release.catalog_number,
+ media="Digital",
+ data_source=self.data_source,
+ data_url=release.url,
+ genre=release.genre,
+ )
def _get_track_info(self, track):
- """Returns a TrackInfo object for a Beatport Track object.
- """
+ """Returns a TrackInfo object for a Beatport Track object."""
title = track.name
if track.mix_name != "Original Mix":
title += f" ({track.mix_name})"
artist, artist_id = self._get_artist(track.artists)
length = track.length.total_seconds()
- return TrackInfo(title=title, track_id=track.beatport_id,
- artist=artist, artist_id=artist_id,
- length=length, index=track.track_number,
- medium_index=track.track_number,
- data_source=self.data_source, data_url=track.url,
- bpm=track.bpm, initial_key=track.initial_key,
- genre=track.genre)
+ return TrackInfo(
+ title=title,
+ track_id=track.beatport_id,
+ artist=artist,
+ artist_id=artist_id,
+ length=length,
+ index=track.track_number,
+ medium_index=track.track_number,
+ data_source=self.data_source,
+ data_url=track.url,
+ bpm=track.bpm,
+ initial_key=track.initial_key,
+ genre=track.genre,
+ )
def _get_artist(self, artists):
"""Returns an artist string (all artists) and an artist_id (the main
@@ -468,8 +491,7 @@ def _get_artist(self, artists):
)
def _get_tracks(self, query):
- """Returns a list of TrackInfo objects for a Beatport query.
- """
- bp_tracks = self.client.search(query, release_type='track')
+ """Returns a list of TrackInfo objects for a Beatport query."""
+ bp_tracks = self.client.search(query, release_type="track")
tracks = [self._get_track_info(x) for x in bp_tracks]
return tracks
diff --git a/beetsplug/bench.py b/beetsplug/bench.py
index 6dffbdda09..673b9b7c64 100644
--- a/beetsplug/bench.py
+++ b/beetsplug/bench.py
@@ -16,17 +16,14 @@
"""
-from beets.plugins import BeetsPlugin
-from beets import ui
-from beets import vfs
-from beets import library
-from beets.util.functemplate import Template
-from beets.autotag import match
-from beets import plugins
-from beets import importer
import cProfile
import timeit
+from beets import importer, library, plugins, ui, vfs
+from beets.autotag import match
+from beets.plugins import BeetsPlugin
+from beets.util.functemplate import Template
+
def aunique_benchmark(lib, prof):
def _build_tree():
@@ -34,74 +31,103 @@ def _build_tree():
# Measure path generation performance with %aunique{} included.
lib.path_formats = [
- (library.PF_KEY_DEFAULT,
- Template('$albumartist/$album%aunique{}/$track $title')),
+ (
+ library.PF_KEY_DEFAULT,
+ Template("$albumartist/$album%aunique{}/$track $title"),
+ ),
]
if prof:
- cProfile.runctx('_build_tree()', {}, {'_build_tree': _build_tree},
- 'paths.withaunique.prof')
+ cProfile.runctx(
+ "_build_tree()",
+ {},
+ {"_build_tree": _build_tree},
+ "paths.withaunique.prof",
+ )
else:
interval = timeit.timeit(_build_tree, number=1)
- print('With %aunique:', interval)
+ print("With %aunique:", interval)
# And with %aunique replaceed with a "cheap" no-op function.
lib.path_formats = [
- (library.PF_KEY_DEFAULT,
- Template('$albumartist/$album%lower{}/$track $title')),
+ (
+ library.PF_KEY_DEFAULT,
+ Template("$albumartist/$album%lower{}/$track $title"),
+ ),
]
if prof:
- cProfile.runctx('_build_tree()', {}, {'_build_tree': _build_tree},
- 'paths.withoutaunique.prof')
+ cProfile.runctx(
+ "_build_tree()",
+ {},
+ {"_build_tree": _build_tree},
+ "paths.withoutaunique.prof",
+ )
else:
interval = timeit.timeit(_build_tree, number=1)
- print('Without %aunique:', interval)
+ print("Without %aunique:", interval)
def match_benchmark(lib, prof, query=None, album_id=None):
# If no album ID is provided, we'll match against a suitably huge
# album.
if not album_id:
- album_id = '9c5c043e-bc69-4edb-81a4-1aaf9c81e6dc'
+ album_id = "9c5c043e-bc69-4edb-81a4-1aaf9c81e6dc"
# Get an album from the library to use as the source for the match.
items = lib.albums(query).get().items()
# Ensure fingerprinting is invoked (if enabled).
- plugins.send('import_task_start',
- task=importer.ImportTask(None, None, items),
- session=importer.ImportSession(lib, None, None, None))
+ plugins.send(
+ "import_task_start",
+ task=importer.ImportTask(None, None, items),
+ session=importer.ImportSession(lib, None, None, None),
+ )
# Run the match.
def _run_match():
match.tag_album(items, search_ids=[album_id])
+
if prof:
- cProfile.runctx('_run_match()', {}, {'_run_match': _run_match},
- 'match.prof')
+ cProfile.runctx(
+ "_run_match()", {}, {"_run_match": _run_match}, "match.prof"
+ )
else:
interval = timeit.timeit(_run_match, number=1)
- print('match duration:', interval)
+ print("match duration:", interval)
class BenchmarkPlugin(BeetsPlugin):
- """A plugin for performing some simple performance benchmarks.
- """
+ """A plugin for performing some simple performance benchmarks."""
+
def commands(self):
- aunique_bench_cmd = ui.Subcommand('bench_aunique',
- help='benchmark for %aunique{}')
- aunique_bench_cmd.parser.add_option('-p', '--profile',
- action='store_true', default=False,
- help='performance profiling')
- aunique_bench_cmd.func = lambda lib, opts, args: \
- aunique_benchmark(lib, opts.profile)
-
- match_bench_cmd = ui.Subcommand('bench_match',
- help='benchmark for track matching')
- match_bench_cmd.parser.add_option('-p', '--profile',
- action='store_true', default=False,
- help='performance profiling')
- match_bench_cmd.parser.add_option('-i', '--id', default=None,
- help='album ID to match against')
- match_bench_cmd.func = lambda lib, opts, args: \
- match_benchmark(lib, opts.profile, ui.decargs(args), opts.id)
+ aunique_bench_cmd = ui.Subcommand(
+ "bench_aunique", help="benchmark for %aunique{}"
+ )
+ aunique_bench_cmd.parser.add_option(
+ "-p",
+ "--profile",
+ action="store_true",
+ default=False,
+ help="performance profiling",
+ )
+ aunique_bench_cmd.func = lambda lib, opts, args: aunique_benchmark(
+ lib, opts.profile
+ )
+
+ match_bench_cmd = ui.Subcommand(
+ "bench_match", help="benchmark for track matching"
+ )
+ match_bench_cmd.parser.add_option(
+ "-p",
+ "--profile",
+ action="store_true",
+ default=False,
+ help="performance profiling",
+ )
+ match_bench_cmd.parser.add_option(
+ "-i", "--id", default=None, help="album ID to match against"
+ )
+ match_bench_cmd.func = lambda lib, opts, args: match_benchmark(
+ lib, opts.profile, ui.decargs(args), opts.id
+ )
return [aunique_bench_cmd, match_bench_cmd]
diff --git a/beetsplug/bpd/__init__.py b/beetsplug/bpd/__init__.py
index 8c02d3d449..3d73964010 100644
--- a/beetsplug/bpd/__init__.py
+++ b/beetsplug/bpd/__init__.py
@@ -18,35 +18,35 @@
"""
+import inspect
+import math
+import random
import re
+import socket
import sys
-from string import Template
-import traceback
-import random
import time
-import math
-import inspect
-import socket
+import traceback
+from string import Template
+
+from mediafile import MediaFile
import beets
-from beets.plugins import BeetsPlugin
import beets.ui
-from beets import vfs
-from beets.util import bluelet
+from beets import dbcore, vfs
from beets.library import Item
-from beets import dbcore
-from mediafile import MediaFile
+from beets.plugins import BeetsPlugin
+from beets.util import bluelet
-PROTOCOL_VERSION = '0.16.0'
+PROTOCOL_VERSION = "0.16.0"
BUFSIZE = 1024
-HELLO = 'OK MPD %s' % PROTOCOL_VERSION
-CLIST_BEGIN = 'command_list_begin'
-CLIST_VERBOSE_BEGIN = 'command_list_ok_begin'
-CLIST_END = 'command_list_end'
-RESP_OK = 'OK'
-RESP_CLIST_VERBOSE = 'list_OK'
-RESP_ERR = 'ACK'
+HELLO = "OK MPD %s" % PROTOCOL_VERSION
+CLIST_BEGIN = "command_list_begin"
+CLIST_VERBOSE_BEGIN = "command_list_ok_begin"
+CLIST_END = "command_list_end"
+RESP_OK = "OK"
+RESP_CLIST_VERBOSE = "list_OK"
+RESP_ERR = "ACK"
NEWLINE = "\n"
@@ -68,15 +68,28 @@
SAFE_COMMANDS = (
# Commands that are available when unauthenticated.
- 'close', 'commands', 'notcommands', 'password', 'ping',
+ "close",
+ "commands",
+ "notcommands",
+ "password",
+ "ping",
)
# List of subsystems/events used by the `idle` command.
SUBSYSTEMS = [
- 'update', 'player', 'mixer', 'options', 'playlist', 'database',
+ "update",
+ "player",
+ "mixer",
+ "options",
+ "playlist",
+ "database",
# Related to unsupported commands:
- 'stored_playlist', 'output', 'subscription', 'sticker', 'message',
- 'partition',
+ "stored_playlist",
+ "output",
+ "subscription",
+ "sticker",
+ "message",
+ "partition",
]
ITEM_KEYS_WRITABLE = set(MediaFile.fields()).intersection(Item._fields.keys())
@@ -89,48 +102,53 @@ class NoGstreamerError(Exception):
# Error-handling, exceptions, parameter parsing.
+
class BPDError(Exception):
"""An error that should be exposed to the client to the BPD
server.
"""
- def __init__(self, code, message, cmd_name='', index=0):
+
+ def __init__(self, code, message, cmd_name="", index=0):
self.code = code
self.message = message
self.cmd_name = cmd_name
self.index = index
- template = Template('$resp [$code@$index] {$cmd_name} $message')
+ template = Template("$resp [$code@$index] {$cmd_name} $message")
def response(self):
"""Returns a string to be used as the response code for the
erring command.
"""
- return self.template.substitute({
- 'resp': RESP_ERR,
- 'code': self.code,
- 'index': self.index,
- 'cmd_name': self.cmd_name,
- 'message': self.message,
- })
+ return self.template.substitute(
+ {
+ "resp": RESP_ERR,
+ "code": self.code,
+ "index": self.index,
+ "cmd_name": self.cmd_name,
+ "message": self.message,
+ }
+ )
def make_bpd_error(s_code, s_message):
- """Create a BPDError subclass for a static code and message.
- """
+ """Create a BPDError subclass for a static code and message."""
class NewBPDError(BPDError):
code = s_code
message = s_message
- cmd_name = ''
+ cmd_name = ""
index = 0
def __init__(self):
pass
+
return NewBPDError
-ArgumentTypeError = make_bpd_error(ERROR_ARG, 'invalid type for argument')
-ArgumentIndexError = make_bpd_error(ERROR_ARG, 'argument out of range')
-ArgumentNotFoundError = make_bpd_error(ERROR_NO_EXIST, 'argument not found')
+
+ArgumentTypeError = make_bpd_error(ERROR_ARG, "invalid type for argument")
+ArgumentIndexError = make_bpd_error(ERROR_ARG, "argument out of range")
+ArgumentNotFoundError = make_bpd_error(ERROR_NO_EXIST, "argument not found")
def cast_arg(t, val):
@@ -140,7 +158,7 @@ def cast_arg(t, val):
If 't' is the special string 'intbool', attempts to cast first
to an int and then to a bool (i.e., 1=True, 0=False).
"""
- if t == 'intbool':
+ if t == "intbool":
return cast_arg(bool, cast_arg(int, val))
else:
try:
@@ -159,6 +177,7 @@ class BPDIdle(Exception):
"""Raised by a command to indicate the client wants to enter the idle state
and should be notified when a relevant event happens.
"""
+
def __init__(self, subsystems):
super().__init__()
self.subsystems = set(subsystems)
@@ -201,8 +220,8 @@ def __init__(self, host, port, password, ctrl_port, log, ctrl_host=None):
self.volume = VOLUME_MAX
self.crossfade = 0
self.mixrampdb = 0.0
- self.mixrampdelay = float('nan')
- self.replay_gain_mode = 'off'
+ self.mixrampdelay = float("nan")
+ self.replay_gain_mode = "off"
self.playlist = []
self.playlist_version = 0
self.current_index = -1
@@ -216,13 +235,11 @@ def __init__(self, host, port, password, ctrl_port, log, ctrl_host=None):
self.random_obj = random.Random()
def connect(self, conn):
- """A new client has connected.
- """
+ """A new client has connected."""
self.connections.add(conn)
def disconnect(self, conn):
- """Client has disconnected; clean up residual state.
- """
+ """Client has disconnected; clean up residual state."""
self.connections.remove(conn)
def run(self):
@@ -233,15 +250,20 @@ def run(self):
def start():
yield bluelet.spawn(
- bluelet.server(self.ctrl_host, self.ctrl_port,
- ControlConnection.handler(self)))
- yield bluelet.server(self.host, self.port,
- MPDConnection.handler(self))
+ bluelet.server(
+ self.ctrl_host,
+ self.ctrl_port,
+ ControlConnection.handler(self),
+ )
+ )
+ yield bluelet.server(
+ self.host, self.port, MPDConnection.handler(self)
+ )
+
bluelet.run(start())
def dispatch_events(self):
- """If any clients have idle events ready, send them.
- """
+ """If any clients have idle events ready, send them."""
# We need a copy of `self.connections` here since clients might
# disconnect once we try and send to them, changing `self.connections`.
for conn in list(self.connections):
@@ -255,7 +277,7 @@ def _ctrl_send(self, message):
if not self.ctrl_sock:
self.ctrl_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.ctrl_sock.connect((self.ctrl_host, self.ctrl_port))
- self.ctrl_sock.sendall((message + '\n').encode('utf-8'))
+ self.ctrl_sock.sendall((message + "\n").encode("utf-8"))
def _send_event(self, event):
"""Notify subscribed connections of an event."""
@@ -269,8 +291,7 @@ def _item_info(self, item):
raise NotImplementedError
def _item_id(self, item):
- """An abstract method returning the integer id for an item.
- """
+ """An abstract method returning the integer id for an item."""
raise NotImplementedError
def _id_to_index(self, track_id):
@@ -326,8 +347,7 @@ def cmd_idle(self, conn, *subsystems):
subsystems = subsystems or SUBSYSTEMS
for system in subsystems:
if system not in SUBSYSTEMS:
- raise BPDError(ERROR_ARG,
- f'Unrecognised idle event: {system}')
+ raise BPDError(ERROR_ARG, f"Unrecognised idle event: {system}")
raise BPDIdle(subsystems) # put the connection into idle mode
def cmd_kill(self, conn):
@@ -344,30 +364,30 @@ def cmd_password(self, conn, password):
conn.authenticated = True
else:
conn.authenticated = False
- raise BPDError(ERROR_PASSWORD, 'incorrect password')
+ raise BPDError(ERROR_PASSWORD, "incorrect password")
def cmd_commands(self, conn):
"""Lists the commands available to the user."""
if self.password and not conn.authenticated:
# Not authenticated. Show limited list of commands.
for cmd in SAFE_COMMANDS:
- yield 'command: ' + cmd
+ yield "command: " + cmd
else:
# Authenticated. Show all commands.
for func in dir(self):
- if func.startswith('cmd_'):
- yield 'command: ' + func[4:]
+ if func.startswith("cmd_"):
+ yield "command: " + func[4:]
def cmd_notcommands(self, conn):
"""Lists all unavailable commands."""
if self.password and not conn.authenticated:
# Not authenticated. Show privileged commands.
for func in dir(self):
- if func.startswith('cmd_'):
+ if func.startswith("cmd_"):
cmd = func[4:]
if cmd not in SAFE_COMMANDS:
- yield 'command: ' + cmd
+ yield "command: " + cmd
else:
# Authenticated. No commands are unavailable.
@@ -381,43 +401,43 @@ def cmd_status(self, conn):
playlist, playlistlength, and xfade.
"""
yield (
- 'repeat: ' + str(int(self.repeat)),
- 'random: ' + str(int(self.random)),
- 'consume: ' + str(int(self.consume)),
- 'single: ' + str(int(self.single)),
- 'playlist: ' + str(self.playlist_version),
- 'playlistlength: ' + str(len(self.playlist)),
- 'mixrampdb: ' + str(self.mixrampdb),
+ "repeat: " + str(int(self.repeat)),
+ "random: " + str(int(self.random)),
+ "consume: " + str(int(self.consume)),
+ "single: " + str(int(self.single)),
+ "playlist: " + str(self.playlist_version),
+ "playlistlength: " + str(len(self.playlist)),
+ "mixrampdb: " + str(self.mixrampdb),
)
if self.volume > 0:
- yield 'volume: ' + str(self.volume)
+ yield "volume: " + str(self.volume)
if not math.isnan(self.mixrampdelay):
- yield 'mixrampdelay: ' + str(self.mixrampdelay)
+ yield "mixrampdelay: " + str(self.mixrampdelay)
if self.crossfade > 0:
- yield 'xfade: ' + str(self.crossfade)
+ yield "xfade: " + str(self.crossfade)
if self.current_index == -1:
- state = 'stop'
+ state = "stop"
elif self.paused:
- state = 'pause'
+ state = "pause"
else:
- state = 'play'
- yield 'state: ' + state
+ state = "play"
+ yield "state: " + state
if self.current_index != -1: # i.e., paused or playing
current_id = self._item_id(self.playlist[self.current_index])
- yield 'song: ' + str(self.current_index)
- yield 'songid: ' + str(current_id)
+ yield "song: " + str(self.current_index)
+ yield "songid: " + str(current_id)
if len(self.playlist) > self.current_index + 1:
# If there's a next song, report its index too.
next_id = self._item_id(self.playlist[self.current_index + 1])
- yield 'nextsong: ' + str(self.current_index + 1)
- yield 'nextsongid: ' + str(next_id)
+ yield "nextsong: " + str(self.current_index + 1)
+ yield "nextsongid: " + str(next_id)
if self.error:
- yield 'error: ' + self.error
+ yield "error: " + self.error
def cmd_clearerror(self, conn):
"""Removes the persistent error state of the server. This
@@ -428,32 +448,32 @@ def cmd_clearerror(self, conn):
def cmd_random(self, conn, state):
"""Set or unset random (shuffle) mode."""
- self.random = cast_arg('intbool', state)
- self._send_event('options')
+ self.random = cast_arg("intbool", state)
+ self._send_event("options")
def cmd_repeat(self, conn, state):
"""Set or unset repeat mode."""
- self.repeat = cast_arg('intbool', state)
- self._send_event('options')
+ self.repeat = cast_arg("intbool", state)
+ self._send_event("options")
def cmd_consume(self, conn, state):
"""Set or unset consume mode."""
- self.consume = cast_arg('intbool', state)
- self._send_event('options')
+ self.consume = cast_arg("intbool", state)
+ self._send_event("options")
def cmd_single(self, conn, state):
"""Set or unset single mode."""
# TODO support oneshot in addition to 0 and 1 [MPD 0.20]
- self.single = cast_arg('intbool', state)
- self._send_event('options')
+ self.single = cast_arg("intbool", state)
+ self._send_event("options")
def cmd_setvol(self, conn, vol):
"""Set the player's volume level (0-100)."""
vol = cast_arg(int, vol)
if vol < VOLUME_MIN or vol > VOLUME_MAX:
- raise BPDError(ERROR_ARG, 'volume out of range')
+ raise BPDError(ERROR_ARG, "volume out of range")
self.volume = vol
- self._send_event('mixer')
+ self._send_event("mixer")
def cmd_volume(self, conn, vol_delta):
"""Deprecated command to change the volume by a relative amount."""
@@ -464,47 +484,47 @@ def cmd_crossfade(self, conn, crossfade):
"""Set the number of seconds of crossfading."""
crossfade = cast_arg(int, crossfade)
if crossfade < 0:
- raise BPDError(ERROR_ARG, 'crossfade time must be nonnegative')
- self._log.warning('crossfade is not implemented in bpd')
+ raise BPDError(ERROR_ARG, "crossfade time must be nonnegative")
+ self._log.warning("crossfade is not implemented in bpd")
self.crossfade = crossfade
- self._send_event('options')
+ self._send_event("options")
def cmd_mixrampdb(self, conn, db):
"""Set the mixramp normalised max volume in dB."""
db = cast_arg(float, db)
if db > 0:
- raise BPDError(ERROR_ARG, 'mixrampdb time must be negative')
- self._log.warning('mixramp is not implemented in bpd')
+ raise BPDError(ERROR_ARG, "mixrampdb time must be negative")
+ self._log.warning("mixramp is not implemented in bpd")
self.mixrampdb = db
- self._send_event('options')
+ self._send_event("options")
def cmd_mixrampdelay(self, conn, delay):
"""Set the mixramp delay in seconds."""
delay = cast_arg(float, delay)
if delay < 0:
- raise BPDError(ERROR_ARG, 'mixrampdelay time must be nonnegative')
- self._log.warning('mixramp is not implemented in bpd')
+ raise BPDError(ERROR_ARG, "mixrampdelay time must be nonnegative")
+ self._log.warning("mixramp is not implemented in bpd")
self.mixrampdelay = delay
- self._send_event('options')
+ self._send_event("options")
def cmd_replay_gain_mode(self, conn, mode):
"""Set the replay gain mode."""
- if mode not in ['off', 'track', 'album', 'auto']:
- raise BPDError(ERROR_ARG, 'Unrecognised replay gain mode')
- self._log.warning('replay gain is not implemented in bpd')
+ if mode not in ["off", "track", "album", "auto"]:
+ raise BPDError(ERROR_ARG, "Unrecognised replay gain mode")
+ self._log.warning("replay gain is not implemented in bpd")
self.replay_gain_mode = mode
- self._send_event('options')
+ self._send_event("options")
def cmd_replay_gain_status(self, conn):
"""Get the replaygain mode."""
- yield 'replay_gain_mode: ' + str(self.replay_gain_mode)
+ yield "replay_gain_mode: " + str(self.replay_gain_mode)
def cmd_clear(self, conn):
"""Clear the playlist."""
self.playlist = []
self.playlist_version += 1
self.cmd_stop(conn)
- self._send_event('playlist')
+ self._send_event("playlist")
def cmd_delete(self, conn, index):
"""Remove the song at index from the playlist."""
@@ -520,7 +540,7 @@ def cmd_delete(self, conn, index):
elif index < self.current_index: # Deleted before playing.
# Shift playing index down.
self.current_index -= 1
- self._send_event('playlist')
+ self._send_event("playlist")
def cmd_deleteid(self, conn, track_id):
self.cmd_delete(conn, self._id_to_index(track_id))
@@ -544,7 +564,7 @@ def cmd_move(self, conn, idx_from, idx_to):
self.current_index += 1
self.playlist_version += 1
- self._send_event('playlist')
+ self._send_event("playlist")
def cmd_moveid(self, conn, idx_from, idx_to):
idx_from = self._id_to_index(idx_from)
@@ -570,7 +590,7 @@ def cmd_swap(self, conn, i, j):
self.current_index = i
self.playlist_version += 1
- self._send_event('playlist')
+ self._send_event("playlist")
def cmd_swapid(self, conn, i_id, j_id):
i = self._id_to_index(i_id)
@@ -618,12 +638,11 @@ def cmd_plchangesposid(self, conn, version):
Also a dummy implementation.
"""
for idx, track in enumerate(self.playlist):
- yield 'cpos: ' + str(idx)
- yield 'Id: ' + str(track.id)
+ yield "cpos: " + str(idx)
+ yield "Id: " + str(track.id)
def cmd_currentsong(self, conn):
- """Sends information about the currently-playing song.
- """
+ """Sends information about the currently-playing song."""
if self.current_index != -1: # -1 means stopped.
track = self.playlist[self.current_index]
yield self._item_info(track)
@@ -668,8 +687,8 @@ def cmd_pause(self, conn, state=None):
if state is None:
self.paused = not self.paused # Toggle.
else:
- self.paused = cast_arg('intbool', state)
- self._send_event('player')
+ self.paused = cast_arg("intbool", state)
+ self._send_event("player")
def cmd_play(self, conn, index=-1):
"""Begin playback, possibly at a specified playlist index."""
@@ -689,7 +708,7 @@ def cmd_play(self, conn, index=-1):
self.current_index = index
self.paused = False
- self._send_event('player')
+ self._send_event("player")
def cmd_playid(self, conn, track_id=0):
track_id = cast_arg(int, track_id)
@@ -703,7 +722,7 @@ def cmd_stop(self, conn):
"""Stop playback."""
self.current_index = -1
self.paused = False
- self._send_event('player')
+ self._send_event("player")
def cmd_seek(self, conn, index, pos):
"""Seek to a specified point in a specified song."""
@@ -711,7 +730,7 @@ def cmd_seek(self, conn, index, pos):
if index < 0 or index >= len(self.playlist):
raise ArgumentIndexError()
self.current_index = index
- self._send_event('player')
+ self._send_event("player")
def cmd_seekid(self, conn, track_id, pos):
index = self._id_to_index(track_id)
@@ -725,23 +744,21 @@ def cmd_crash_TypeError(self, conn): # noqa: N802
without crashing, and that this is not treated as ERROR_ARG (since it
is caused by a programming error, not a protocol error).
"""
- 'a' + 2
+ "a" + 2
class Connection:
- """A connection between a client and the server.
- """
+ """A connection between a client and the server."""
+
def __init__(self, server, sock):
- """Create a new connection for the accepted socket `client`.
- """
+ """Create a new connection for the accepted socket `client`."""
self.server = server
self.sock = sock
- self.address = '{}:{}'.format(*sock.sock.getpeername())
+ self.address = "{}:{}".format(*sock.sock.getpeername())
- def debug(self, message, kind=' '):
- """Log a debug message about this connection.
- """
- self.server._log.debug('{}[{}]: {}', kind, self.address, message)
+ def debug(self, message, kind=" "):
+ """Log a debug message about this connection."""
+ self.server._log.debug("{}[{}]: {}", kind, self.address, message)
def run(self):
pass
@@ -756,26 +773,25 @@ def send(self, lines):
lines = [lines]
out = NEWLINE.join(lines) + NEWLINE
for l in out.split(NEWLINE)[:-1]:
- self.debug(l, kind='>')
+ self.debug(l, kind=">")
if isinstance(out, str):
- out = out.encode('utf-8')
+ out = out.encode("utf-8")
return self.sock.sendall(out)
@classmethod
def handler(cls, server):
def _handle(sock):
- """Creates a new `Connection` and runs it.
- """
+ """Creates a new `Connection` and runs it."""
return cls(server, sock).run()
+
return _handle
class MPDConnection(Connection):
- """A connection that receives commands from an MPD-compatible client.
- """
+ """A connection that receives commands from an MPD-compatible client."""
+
def __init__(self, server, sock):
- """Create a new connection for the accepted socket `client`.
- """
+ """Create a new connection for the accepted socket `client`."""
super().__init__(server, sock)
self.authenticated = False
self.notifications = set()
@@ -794,23 +810,20 @@ def do_command(self, command):
yield self.send(RESP_OK)
def disconnect(self):
- """The connection has closed for any reason.
- """
+ """The connection has closed for any reason."""
self.server.disconnect(self)
- self.debug('disconnected', kind='*')
+ self.debug("disconnected", kind="*")
def notify(self, event):
- """Queue up an event for sending to this client.
- """
+ """Queue up an event for sending to this client."""
self.notifications.add(event)
def send_notifications(self, force_close_idle=False):
- """Send the client any queued events now.
- """
+ """Send the client any queued events now."""
pending = self.notifications.intersection(self.idle_subscriptions)
try:
for event in pending:
- yield self.send(f'changed: {event}')
+ yield self.send(f"changed: {event}")
if pending or force_close_idle:
self.idle_subscriptions = set()
self.notifications = self.notifications.difference(pending)
@@ -822,7 +835,7 @@ def run(self):
"""Send a greeting to the client and begin processing commands
as they arrive.
"""
- self.debug('connected', kind='*')
+ self.debug("connected", kind="*")
self.server.connect(self)
yield self.send(HELLO)
@@ -834,25 +847,26 @@ def run(self):
break
line = line.strip()
if not line:
- err = BPDError(ERROR_UNKNOWN, 'No command given')
+ err = BPDError(ERROR_UNKNOWN, "No command given")
yield self.send(err.response())
self.disconnect() # Client sent a blank line.
break
- line = line.decode('utf8') # MPD protocol uses UTF-8.
+ line = line.decode("utf8") # MPD protocol uses UTF-8.
for l in line.split(NEWLINE):
- self.debug(l, kind='<')
+ self.debug(l, kind="<")
if self.idle_subscriptions:
# The connection is in idle mode.
- if line == 'noidle':
+ if line == "noidle":
yield bluelet.call(self.send_notifications(True))
else:
- err = BPDError(ERROR_UNKNOWN,
- f'Got command while idle: {line}')
+ err = BPDError(
+ ERROR_UNKNOWN, f"Got command while idle: {line}"
+ )
yield self.send(err.response())
break
continue
- if line == 'noidle':
+ if line == "noidle":
# When not in idle, this command sends no response.
continue
@@ -880,26 +894,25 @@ def run(self):
return
except BPDIdle as e:
self.idle_subscriptions = e.subsystems
- self.debug('awaiting: {}'.format(' '.join(e.subsystems)),
- kind='z')
+ self.debug(
+ "awaiting: {}".format(" ".join(e.subsystems)), kind="z"
+ )
yield bluelet.call(self.server.dispatch_events())
class ControlConnection(Connection):
- """A connection used to control BPD for debugging and internal events.
- """
+ """A connection used to control BPD for debugging and internal events."""
+
def __init__(self, server, sock):
- """Create a new connection for the accepted socket `client`.
- """
+ """Create a new connection for the accepted socket `client`."""
super().__init__(server, sock)
- def debug(self, message, kind=' '):
- self.server._log.debug('CTRL {}[{}]: {}', kind, self.address, message)
+ def debug(self, message, kind=" "):
+ self.server._log.debug("CTRL {}[{}]: {}", kind, self.address, message)
def run(self):
- """Listen for control commands and delegate to `ctrl_*` methods.
- """
- self.debug('connected', kind='*')
+ """Listen for control commands and delegate to `ctrl_*` methods."""
+ self.debug("connected", kind="*")
while True:
line = yield self.sock.readline()
if not line:
@@ -907,47 +920,45 @@ def run(self):
line = line.strip()
if not line:
break # Client sent a blank line.
- line = line.decode('utf8') # Protocol uses UTF-8.
+ line = line.decode("utf8") # Protocol uses UTF-8.
for l in line.split(NEWLINE):
- self.debug(l, kind='<')
+ self.debug(l, kind="<")
command = Command(line)
try:
- func = command.delegate('ctrl_', self)
+ func = command.delegate("ctrl_", self)
yield bluelet.call(func(*command.args))
except (AttributeError, TypeError) as e:
- yield self.send('ERROR: {}'.format(e.args[0]))
+ yield self.send("ERROR: {}".format(e.args[0]))
except Exception:
- yield self.send(['ERROR: server error',
- traceback.format_exc().rstrip()])
+ yield self.send(
+ ["ERROR: server error", traceback.format_exc().rstrip()]
+ )
def ctrl_play_finished(self):
- """Callback from the player signalling a song finished playing.
- """
+ """Callback from the player signalling a song finished playing."""
yield bluelet.call(self.server.dispatch_events())
def ctrl_profile(self):
- """Memory profiling for debugging.
- """
+ """Memory profiling for debugging."""
from guppy import hpy
+
heap = hpy().heap()
yield self.send(heap)
def ctrl_nickname(self, oldlabel, newlabel):
- """Rename a client in the log messages.
- """
+ """Rename a client in the log messages."""
for c in self.server.connections:
if c.address == oldlabel:
c.address = newlabel
break
else:
- yield self.send(f'ERROR: no such client: {oldlabel}')
+ yield self.send(f"ERROR: no such client: {oldlabel}")
class Command:
- """A command issued by the client for processing by the server.
- """
+ """A command issued by the client for processing by the server."""
- command_re = re.compile(r'^([^ \t]+)[ \t]*')
+ command_re = re.compile(r"^([^ \t]+)[ \t]*")
arg_re = re.compile(r'"((?:\\"|[^"])+)"|([^ \t"]+)')
def __init__(self, s):
@@ -958,12 +969,12 @@ def __init__(self, s):
self.name = command_match.group(1)
self.args = []
- arg_matches = self.arg_re.findall(s[command_match.end():])
+ arg_matches = self.arg_re.findall(s[command_match.end() :])
for match in arg_matches:
if match[0]:
# Quoted argument.
arg = match[0]
- arg = arg.replace('\\"', '"').replace('\\\\', '\\')
+ arg = arg.replace('\\"', '"').replace("\\\\", "\\")
else:
# Unquoted argument.
arg = match[1]
@@ -994,8 +1005,10 @@ def delegate(self, prefix, target, extra_args=0):
wrong_num = (len(self.args) > max_args) or (len(self.args) < min_args)
# If the command accepts a variable number of arguments skip the check.
if wrong_num and not argspec.varargs:
- raise TypeError('wrong number of arguments for "{}"'
- .format(self.name), self.name)
+ raise TypeError(
+ 'wrong number of arguments for "{}"'.format(self.name),
+ self.name,
+ )
return func
@@ -1005,17 +1018,19 @@ def run(self, conn):
"""
try:
# `conn` is an extra argument to all cmd handlers.
- func = self.delegate('cmd_', conn.server, extra_args=1)
+ func = self.delegate("cmd_", conn.server, extra_args=1)
except AttributeError as e:
raise BPDError(ERROR_UNKNOWN, e.args[0])
except TypeError as e:
raise BPDError(ERROR_ARG, e.args[0], self.name)
# Ensure we have permission for this command.
- if conn.server.password and \
- not conn.authenticated and \
- self.name not in SAFE_COMMANDS:
- raise BPDError(ERROR_PERMISSION, 'insufficient privileges')
+ if (
+ conn.server.password
+ and not conn.authenticated
+ and self.name not in SAFE_COMMANDS
+ ):
+ raise BPDError(ERROR_PERMISSION, "insufficient privileges")
try:
args = [conn] + self.args
@@ -1040,8 +1055,8 @@ def run(self, conn):
except Exception:
# An "unintentional" error. Hide it from the client.
- conn.server._log.error('{}', traceback.format_exc())
- raise BPDError(ERROR_SYSTEM, 'server error', self.name)
+ conn.server._log.error("{}", traceback.format_exc())
+ raise BPDError(ERROR_SYSTEM, "server error", self.name)
class CommandList(list):
@@ -1060,8 +1075,7 @@ def __init__(self, sequence=None, verbose=False):
self.verbose = verbose
def run(self, conn):
- """Coroutine executing all the commands in this list.
- """
+ """Coroutine executing all the commands in this list."""
for i, command in enumerate(self):
try:
yield bluelet.call(command.run(conn))
@@ -1079,6 +1093,7 @@ def run(self, conn):
# A subclass of the basic, protocol-handling server that actually plays
# music.
+
class Server(BaseServer):
"""An MPD-compatible server using GStreamer to play audio and beets
to store its library.
@@ -1089,50 +1104,50 @@ def __init__(self, library, host, port, password, ctrl_port, log):
from beetsplug.bpd import gstplayer
except ImportError as e:
# This is a little hacky, but it's the best I know for now.
- if e.args[0].endswith(' gst'):
+ if e.args[0].endswith(" gst"):
raise NoGstreamerError()
else:
raise
- log.info('Starting server...')
+ log.info("Starting server...")
super().__init__(host, port, password, ctrl_port, log)
self.lib = library
self.player = gstplayer.GstPlayer(self.play_finished)
self.cmd_update(None)
- log.info('Server ready and listening on {}:{}'.format(
- host, port))
- log.debug('Listening for control signals on {}:{}'.format(
- host, ctrl_port))
+ log.info("Server ready and listening on {}:{}".format(host, port))
+ log.debug(
+ "Listening for control signals on {}:{}".format(host, ctrl_port)
+ )
def run(self):
self.player.run()
super().run()
def play_finished(self):
- """A callback invoked every time our player finishes a track.
- """
+ """A callback invoked every time our player finishes a track."""
self.cmd_next(None)
- self._ctrl_send('play_finished')
+ self._ctrl_send("play_finished")
# Metadata helper functions.
def _item_info(self, item):
info_lines = [
- 'file: ' + item.destination(fragment=True),
- 'Time: ' + str(int(item.length)),
- 'duration: ' + f'{item.length:.3f}',
- 'Id: ' + str(item.id),
+ "file: " + item.destination(fragment=True),
+ "Time: " + str(int(item.length)),
+ "duration: " + f"{item.length:.3f}",
+ "Id: " + str(item.id),
]
try:
pos = self._id_to_index(item.id)
- info_lines.append('Pos: ' + str(pos))
+ info_lines.append("Pos: " + str(pos))
except ArgumentNotFoundError:
# Don't include position if not in playlist.
pass
for tagtype, field in self.tagtype_map.items():
- info_lines.append('{}: {}'.format(
- tagtype, str(getattr(item, field))))
+ info_lines.append(
+ "{}: {}".format(tagtype, str(getattr(item, field)))
+ )
return info_lines
@@ -1142,11 +1157,11 @@ def _parse_range(self, items, accept_single_number=False):
commands. Sometimes a single number can be provided instead.
"""
try:
- start, stop = str(items).split(':', 1)
+ start, stop = str(items).split(":", 1)
except ValueError:
if accept_single_number:
return [cast_arg(int, items)]
- raise BPDError(ERROR_ARG, 'bad range syntax')
+ raise BPDError(ERROR_ARG, "bad range syntax")
start = cast_arg(int, start)
stop = cast_arg(int, stop)
return range(start, stop)
@@ -1156,17 +1171,16 @@ def _item_id(self, item):
# Database updating.
- def cmd_update(self, conn, path='/'):
- """Updates the catalog to reflect the current database state.
- """
+ def cmd_update(self, conn, path="/"):
+ """Updates the catalog to reflect the current database state."""
# Path is ignored. Also, the real MPD does this asynchronously;
# this is done inline.
- self._log.debug('Building directory tree...')
+ self._log.debug("Building directory tree...")
self.tree = vfs.libtree(self.lib)
- self._log.debug('Finished building directory tree.')
+ self._log.debug("Finished building directory tree.")
self.updated_time = time.time()
- self._send_event('update')
- self._send_event('database')
+ self._send_event("update")
+ self._send_event("database")
# Path (directory tree) browsing.
@@ -1174,7 +1188,7 @@ def _resolve_path(self, path):
"""Returns a VFS node or an item ID located at the path given.
If the path does not exist, raises a
"""
- components = path.split('/')
+ components = path.split("/")
node = self.tree
for component in components:
@@ -1196,15 +1210,15 @@ def _resolve_path(self, path):
def _path_join(self, p1, p2):
"""Smashes together two BPD paths."""
- out = p1 + '/' + p2
- return out.replace('//', '/').replace('//', '/')
+ out = p1 + "/" + p2
+ return out.replace("//", "/").replace("//", "/")
def cmd_lsinfo(self, conn, path="/"):
"""Sends info on all the items in the path."""
node = self._resolve_path(path)
if isinstance(node, int):
# Trying to list a track.
- raise BPDError(ERROR_ARG, 'this is not a directory')
+ raise BPDError(ERROR_ARG, "this is not a directory")
else:
for name, itemid in iter(sorted(node.files.items())):
item = self.lib.get_item(itemid)
@@ -1214,7 +1228,7 @@ def cmd_lsinfo(self, conn, path="/"):
if dirpath.startswith("/"):
# Strip leading slash (libmpc rejects this).
dirpath = dirpath[1:]
- yield 'directory: %s' % dirpath
+ yield "directory: %s" % dirpath
def _listall(self, basepath, node, info=False):
"""Helper function for recursive listing. If info, show
@@ -1226,7 +1240,7 @@ def _listall(self, basepath, node, info=False):
item = self.lib.get_item(node)
yield self._item_info(item)
else:
- yield 'file: ' + basepath
+ yield "file: " + basepath
else:
# List a directory. Recurse into both directories and files.
for name, itemid in sorted(node.files.items()):
@@ -1235,7 +1249,7 @@ def _listall(self, basepath, node, info=False):
yield from self._listall(newpath, itemid, info)
for name, subdir in sorted(node.dirs.items()):
newpath = self._path_join(basepath, name)
- yield 'directory: ' + newpath
+ yield "directory: " + newpath
yield from self._listall(newpath, subdir, info)
def cmd_listall(self, conn, path="/"):
@@ -1249,8 +1263,7 @@ def cmd_listallinfo(self, conn, path="/"):
# Playlist manipulation.
def _all_items(self, node):
- """Generator yielding all items under a VFS node.
- """
+ """Generator yielding all items under a VFS node."""
if isinstance(node, int):
# Could be more efficient if we built up all the IDs and
# then issued a single SELECT.
@@ -1270,9 +1283,9 @@ def _add(self, path, send_id=False):
for item in self._all_items(self._resolve_path(path)):
self.playlist.append(item)
if send_id:
- yield 'Id: ' + str(item.id)
+ yield "Id: " + str(item.id)
self.playlist_version += 1
- self._send_event('playlist')
+ self._send_event("playlist")
def cmd_add(self, conn, path):
"""Adds a track or directory to the playlist, specified by a
@@ -1292,8 +1305,8 @@ def cmd_status(self, conn):
item = self.playlist[self.current_index]
yield (
- 'bitrate: ' + str(item.bitrate / 1000),
- 'audio: {}:{}:{}'.format(
+ "bitrate: " + str(item.bitrate / 1000),
+ "audio: {}:{}:{}".format(
str(item.samplerate),
str(item.bitdepth),
str(item.channels),
@@ -1302,12 +1315,12 @@ def cmd_status(self, conn):
(pos, total) = self.player.time()
yield (
- 'time: {}:{}'.format(
+ "time: {}:{}".format(
str(int(pos)),
str(int(total)),
),
- 'elapsed: ' + f'{pos:.3f}',
- 'duration: ' + f'{total:.3f}',
+ "elapsed: " + f"{pos:.3f}",
+ "duration: " + f"{total:.3f}",
)
# Also missing 'updating_db'.
@@ -1315,55 +1328,57 @@ def cmd_status(self, conn):
def cmd_stats(self, conn):
"""Sends some statistics about the library."""
with self.lib.transaction() as tx:
- statement = 'SELECT COUNT(DISTINCT artist), ' \
- 'COUNT(DISTINCT album), ' \
- 'COUNT(id), ' \
- 'SUM(length) ' \
- 'FROM items'
+ statement = (
+ "SELECT COUNT(DISTINCT artist), "
+ "COUNT(DISTINCT album), "
+ "COUNT(id), "
+ "SUM(length) "
+ "FROM items"
+ )
artists, albums, songs, totaltime = tx.query(statement)[0]
yield (
- 'artists: ' + str(artists),
- 'albums: ' + str(albums),
- 'songs: ' + str(songs),
- 'uptime: ' + str(int(time.time() - self.startup_time)),
- 'playtime: ' + '0', # Missing.
- 'db_playtime: ' + str(int(totaltime)),
- 'db_update: ' + str(int(self.updated_time)),
+ "artists: " + str(artists),
+ "albums: " + str(albums),
+ "songs: " + str(songs),
+ "uptime: " + str(int(time.time() - self.startup_time)),
+ "playtime: " + "0", # Missing.
+ "db_playtime: " + str(int(totaltime)),
+ "db_update: " + str(int(self.updated_time)),
)
def cmd_decoders(self, conn):
"""Send list of supported decoders and formats."""
decoders = self.player.get_decoders()
for name, (mimes, exts) in decoders.items():
- yield f'plugin: {name}'
+ yield f"plugin: {name}"
for ext in exts:
- yield f'suffix: {ext}'
+ yield f"suffix: {ext}"
for mime in mimes:
- yield f'mime_type: {mime}'
+ yield f"mime_type: {mime}"
# Searching.
tagtype_map = {
- 'Artist': 'artist',
- 'ArtistSort': 'artist_sort',
- 'Album': 'album',
- 'Title': 'title',
- 'Track': 'track',
- 'AlbumArtist': 'albumartist',
- 'AlbumArtistSort': 'albumartist_sort',
- 'Label': 'label',
- 'Genre': 'genre',
- 'Date': 'year',
- 'OriginalDate': 'original_year',
- 'Composer': 'composer',
- 'Disc': 'disc',
- 'Comment': 'comments',
- 'MUSICBRAINZ_TRACKID': 'mb_trackid',
- 'MUSICBRAINZ_ALBUMID': 'mb_albumid',
- 'MUSICBRAINZ_ARTISTID': 'mb_artistid',
- 'MUSICBRAINZ_ALBUMARTISTID': 'mb_albumartistid',
- 'MUSICBRAINZ_RELEASETRACKID': 'mb_releasetrackid',
+ "Artist": "artist",
+ "ArtistSort": "artist_sort",
+ "Album": "album",
+ "Title": "title",
+ "Track": "track",
+ "AlbumArtist": "albumartist",
+ "AlbumArtistSort": "albumartist_sort",
+ "Label": "label",
+ "Genre": "genre",
+ "Date": "year",
+ "OriginalDate": "original_year",
+ "Composer": "composer",
+ "Disc": "disc",
+ "Comment": "comments",
+ "MUSICBRAINZ_TRACKID": "mb_trackid",
+ "MUSICBRAINZ_ALBUMID": "mb_albumid",
+ "MUSICBRAINZ_ARTISTID": "mb_artistid",
+ "MUSICBRAINZ_ALBUMARTISTID": "mb_albumartistid",
+ "MUSICBRAINZ_RELEASETRACKID": "mb_releasetrackid",
}
def cmd_tagtypes(self, conn):
@@ -1371,7 +1386,7 @@ def cmd_tagtypes(self, conn):
searching.
"""
for tag in self.tagtype_map:
- yield 'tagtype: ' + tag
+ yield "tagtype: " + tag
def _tagtype_lookup(self, tag):
"""Uses `tagtype_map` to look up the beets column name for an
@@ -1383,7 +1398,7 @@ def _tagtype_lookup(self, tag):
# Match case-insensitively.
if test_tag.lower() == tag.lower():
return test_tag, key
- raise BPDError(ERROR_UNKNOWN, 'no such tagtype')
+ raise BPDError(ERROR_UNKNOWN, "no such tagtype")
def _metadata_query(self, query_type, any_query_type, kv):
"""Helper function returns a query object that will find items
@@ -1396,13 +1411,15 @@ def _metadata_query(self, query_type, any_query_type, kv):
# Iterate pairwise over the arguments.
it = iter(kv)
for tag, value in zip(it, it):
- if tag.lower() == 'any':
+ if tag.lower() == "any":
if any_query_type:
- queries.append(any_query_type(value,
- ITEM_KEYS_WRITABLE,
- query_type))
+ queries.append(
+ any_query_type(
+ value, ITEM_KEYS_WRITABLE, query_type
+ )
+ )
else:
- raise BPDError(ERROR_UNKNOWN, 'no such tagtype')
+ raise BPDError(ERROR_UNKNOWN, "no such tagtype")
else:
_, key = self._tagtype_lookup(tag)
queries.append(query_type(key, value))
@@ -1412,17 +1429,15 @@ def _metadata_query(self, query_type, any_query_type, kv):
def cmd_search(self, conn, *kv):
"""Perform a substring match for items."""
- query = self._metadata_query(dbcore.query.SubstringQuery,
- dbcore.query.AnyFieldQuery,
- kv)
+ query = self._metadata_query(
+ dbcore.query.SubstringQuery, dbcore.query.AnyFieldQuery, kv
+ )
for item in self.lib.items(query):
yield self._item_info(item)
def cmd_find(self, conn, *kv):
"""Perform an exact match for items."""
- query = self._metadata_query(dbcore.query.MatchQuery,
- None,
- kv)
+ query = self._metadata_query(dbcore.query.MatchQuery, None, kv)
for item in self.lib.items(query):
yield self._item_info(item)
@@ -1432,22 +1447,27 @@ def cmd_list(self, conn, show_tag, *kv):
"""
show_tag_canon, show_key = self._tagtype_lookup(show_tag)
if len(kv) == 1:
- if show_tag_canon == 'Album':
+ if show_tag_canon == "Album":
# If no tag was given, assume artist. This is because MPD
# supports a short version of this command for fetching the
# albums belonging to a particular artist, and some clients
# rely on this behaviour (e.g. MPDroid, M.A.L.P.).
- kv = ('Artist', kv[0])
+ kv = ("Artist", kv[0])
else:
raise BPDError(ERROR_ARG, 'should be "Album" for 3 arguments')
elif len(kv) % 2 != 0:
- raise BPDError(ERROR_ARG, 'Incorrect number of filter arguments')
+ raise BPDError(ERROR_ARG, "Incorrect number of filter arguments")
query = self._metadata_query(dbcore.query.MatchQuery, None, kv)
clause, subvals = query.clause()
- statement = 'SELECT DISTINCT ' + show_key + \
- ' FROM items WHERE ' + clause + \
- ' ORDER BY ' + show_key
+ statement = (
+ "SELECT DISTINCT "
+ + show_key
+ + " FROM items WHERE "
+ + clause
+ + " ORDER BY "
+ + show_key
+ )
self._log.debug(statement)
with self.lib.transaction() as tx:
rows = tx.query(statement, subvals)
@@ -1456,7 +1476,7 @@ def cmd_list(self, conn, show_tag, *kv):
if not row[0]:
# Skip any empty values of the field.
continue
- yield show_tag_canon + ': ' + str(row[0])
+ yield show_tag_canon + ": " + str(row[0])
def cmd_count(self, conn, tag, value):
"""Returns the number and total time of songs matching the
@@ -1468,44 +1488,44 @@ def cmd_count(self, conn, tag, value):
for item in self.lib.items(dbcore.query.MatchQuery(key, value)):
songs += 1
playtime += item.length
- yield 'songs: ' + str(songs)
- yield 'playtime: ' + str(int(playtime))
+ yield "songs: " + str(songs)
+ yield "playtime: " + str(int(playtime))
# Persistent playlist manipulation. In MPD this is an optional feature so
# these dummy implementations match MPD's behaviour with the feature off.
def cmd_listplaylist(self, conn, playlist):
- raise BPDError(ERROR_NO_EXIST, 'No such playlist')
+ raise BPDError(ERROR_NO_EXIST, "No such playlist")
def cmd_listplaylistinfo(self, conn, playlist):
- raise BPDError(ERROR_NO_EXIST, 'No such playlist')
+ raise BPDError(ERROR_NO_EXIST, "No such playlist")
def cmd_listplaylists(self, conn):
- raise BPDError(ERROR_UNKNOWN, 'Stored playlists are disabled')
+ raise BPDError(ERROR_UNKNOWN, "Stored playlists are disabled")
def cmd_load(self, conn, playlist):
- raise BPDError(ERROR_NO_EXIST, 'Stored playlists are disabled')
+ raise BPDError(ERROR_NO_EXIST, "Stored playlists are disabled")
def cmd_playlistadd(self, conn, playlist, uri):
- raise BPDError(ERROR_UNKNOWN, 'Stored playlists are disabled')
+ raise BPDError(ERROR_UNKNOWN, "Stored playlists are disabled")
def cmd_playlistclear(self, conn, playlist):
- raise BPDError(ERROR_UNKNOWN, 'Stored playlists are disabled')
+ raise BPDError(ERROR_UNKNOWN, "Stored playlists are disabled")
def cmd_playlistdelete(self, conn, playlist, index):
- raise BPDError(ERROR_UNKNOWN, 'Stored playlists are disabled')
+ raise BPDError(ERROR_UNKNOWN, "Stored playlists are disabled")
def cmd_playlistmove(self, conn, playlist, from_index, to_index):
- raise BPDError(ERROR_UNKNOWN, 'Stored playlists are disabled')
+ raise BPDError(ERROR_UNKNOWN, "Stored playlists are disabled")
def cmd_rename(self, conn, playlist, new_name):
- raise BPDError(ERROR_UNKNOWN, 'Stored playlists are disabled')
+ raise BPDError(ERROR_UNKNOWN, "Stored playlists are disabled")
def cmd_rm(self, conn, playlist):
- raise BPDError(ERROR_UNKNOWN, 'Stored playlists are disabled')
+ raise BPDError(ERROR_UNKNOWN, "Stored playlists are disabled")
def cmd_save(self, conn, playlist):
- raise BPDError(ERROR_UNKNOWN, 'Stored playlists are disabled')
+ raise BPDError(ERROR_UNKNOWN, "Stored playlists are disabled")
# "Outputs." Just a dummy implementation because we don't control
# any outputs.
@@ -1513,9 +1533,9 @@ def cmd_save(self, conn, playlist):
def cmd_outputs(self, conn):
"""List the available outputs."""
yield (
- 'outputid: 0',
- 'outputname: gstreamer',
- 'outputenabled: 1',
+ "outputid: 0",
+ "outputname: gstreamer",
+ "outputenabled: 1",
)
def cmd_enableoutput(self, conn, output_id):
@@ -1526,7 +1546,7 @@ def cmd_enableoutput(self, conn, output_id):
def cmd_disableoutput(self, conn, output_id):
output_id = cast_arg(int, output_id)
if output_id == 0:
- raise BPDError(ERROR_ARG, 'cannot disable this output')
+ raise BPDError(ERROR_ARG, "cannot disable this output")
else:
raise ArgumentIndexError()
@@ -1574,20 +1594,24 @@ def cmd_setvol(self, conn, vol):
# Beets plugin hooks.
+
class BPDPlugin(BeetsPlugin):
"""Provides the "beet bpd" command for running a music player
server.
"""
+
def __init__(self):
super().__init__()
- self.config.add({
- 'host': '',
- 'port': 6600,
- 'control_port': 6601,
- 'password': '',
- 'volume': VOLUME_MAX,
- })
- self.config['password'].redact = True
+ self.config.add(
+ {
+ "host": "",
+ "port": 6600,
+ "control_port": 6601,
+ "password": "",
+ "volume": VOLUME_MAX,
+ }
+ )
+ self.config["password"].redact = True
def start_bpd(self, lib, host, port, password, volume, ctrl_port):
"""Starts a BPD server."""
@@ -1596,29 +1620,32 @@ def start_bpd(self, lib, host, port, password, volume, ctrl_port):
server.cmd_setvol(None, volume)
server.run()
except NoGstreamerError:
- self._log.error('Gstreamer Python bindings not found.')
- self._log.error('Install "gstreamer1.0" and "python-gi"'
- 'or similar package to use BPD.')
+ self._log.error("Gstreamer Python bindings not found.")
+            self._log.error(
+                'Install "gstreamer1.0" and "python-gi"'
+                " or similar package to use BPD."
+            )
def commands(self):
cmd = beets.ui.Subcommand(
- 'bpd', help='run an MPD-compatible music player server'
+ "bpd", help="run an MPD-compatible music player server"
)
def func(lib, opts, args):
- host = self.config['host'].as_str()
+ host = self.config["host"].as_str()
host = args.pop(0) if args else host
- port = args.pop(0) if args else self.config['port'].get(int)
+ port = args.pop(0) if args else self.config["port"].get(int)
if args:
ctrl_port = args.pop(0)
else:
- ctrl_port = self.config['control_port'].get(int)
+ ctrl_port = self.config["control_port"].get(int)
if args:
- raise beets.ui.UserError('too many arguments')
- password = self.config['password'].as_str()
- volume = self.config['volume'].get(int)
- self.start_bpd(lib, host, int(port), password, volume,
- int(ctrl_port))
+ raise beets.ui.UserError("too many arguments")
+ password = self.config["password"].as_str()
+ volume = self.config["volume"].get(int)
+ self.start_bpd(
+ lib, host, int(port), password, volume, int(ctrl_port)
+ )
cmd.func = func
return [cmd]
diff --git a/beetsplug/bpd/gstplayer.py b/beetsplug/bpd/gstplayer.py
index 64954b1c15..77ddc1983e 100644
--- a/beetsplug/bpd/gstplayer.py
+++ b/beetsplug/bpd/gstplayer.py
@@ -17,18 +17,19 @@
"""
-import sys
-import time
import _thread
-import os
import copy
+import os
+import sys
+import time
import urllib
-from beets import ui
import gi
-gi.require_version('Gst', '1.0')
-from gi.repository import GLib, Gst # noqa: E402
+from beets import ui
+
+gi.require_version("Gst", "1.0")
+from gi.repository import GLib, Gst # noqa: E402
Gst.init(None)
@@ -128,8 +129,8 @@ def play_file(self, path):
"""
self.player.set_state(Gst.State.NULL)
if isinstance(path, str):
- path = path.encode('utf-8')
- uri = 'file://' + urllib.parse.quote(path)
+ path = path.encode("utf-8")
+ uri = "file://" + urllib.parse.quote(path)
self.player.set_property("uri", uri)
self.player.set_state(Gst.State.PLAYING)
self.playing = True
@@ -175,12 +176,12 @@ def time(self):
posq = self.player.query_position(fmt)
if not posq[0]:
raise QueryError("query_position failed")
- pos = posq[1] / (10 ** 9)
+ pos = posq[1] / (10**9)
lengthq = self.player.query_duration(fmt)
if not lengthq[0]:
raise QueryError("query_duration failed")
- length = lengthq[1] / (10 ** 9)
+ length = lengthq[1] / (10**9)
self.cached_time = (pos, length)
return (pos, length)
@@ -202,7 +203,7 @@ def seek(self, position):
return
fmt = Gst.Format(Gst.Format.TIME)
- ns = position * 10 ** 9 # convert to nanoseconds
+ ns = position * 10**9 # convert to nanoseconds
self.player.seek_simple(fmt, Gst.SeekFlags.FLUSH, ns)
# save new cached time
@@ -223,11 +224,13 @@ def get_decoders():
and file extensions.
"""
# We only care about audio decoder elements.
- filt = (Gst.ELEMENT_FACTORY_TYPE_DEPAYLOADER |
- Gst.ELEMENT_FACTORY_TYPE_DEMUXER |
- Gst.ELEMENT_FACTORY_TYPE_PARSER |
- Gst.ELEMENT_FACTORY_TYPE_DECODER |
- Gst.ELEMENT_FACTORY_TYPE_MEDIA_AUDIO)
+ filt = (
+ Gst.ELEMENT_FACTORY_TYPE_DEPAYLOADER
+ | Gst.ELEMENT_FACTORY_TYPE_DEMUXER
+ | Gst.ELEMENT_FACTORY_TYPE_PARSER
+ | Gst.ELEMENT_FACTORY_TYPE_DECODER
+ | Gst.ELEMENT_FACTORY_TYPE_MEDIA_AUDIO
+ )
decoders = {}
mime_types = set()
@@ -239,7 +242,7 @@ def get_decoders():
for i in range(caps.get_size()):
struct = caps.get_structure(i)
mime = struct.get_name()
- if mime == 'unknown/unknown':
+ if mime == "unknown/unknown":
continue
mimes.add(mime)
mime_types.add(mime)
@@ -295,10 +298,9 @@ def next_song():
time.sleep(1)
-if __name__ == '__main__':
+if __name__ == "__main__":
# A very simple command-line player. Just give it names of audio
# files on the command line; these are all played in sequence.
- paths = [os.path.abspath(os.path.expanduser(p))
- for p in sys.argv[1:]]
+ paths = [os.path.abspath(os.path.expanduser(p)) for p in sys.argv[1:]]
# play_simple(paths)
play_complicated(paths)
diff --git a/beetsplug/bpm.py b/beetsplug/bpm.py
index 5aa2d95aab..3edcbef827 100644
--- a/beetsplug/bpm.py
+++ b/beetsplug/bpm.py
@@ -30,7 +30,7 @@ def bpm(max_strokes):
for i in range(max_strokes):
# Press enter to the rhythm...
s = input()
- if s == '':
+ if s == "":
t1 = time.time()
# Only start measuring at the second stroke
if t0:
@@ -46,18 +46,20 @@ def bpm(max_strokes):
class BPMPlugin(BeetsPlugin):
-
def __init__(self):
super().__init__()
- self.config.add({
- 'max_strokes': 3,
- 'overwrite': True,
- })
+ self.config.add(
+ {
+ "max_strokes": 3,
+ "overwrite": True,
+ }
+ )
def commands(self):
- cmd = ui.Subcommand('bpm',
- help='determine bpm of a song by pressing '
- 'a key to the rhythm')
+ cmd = ui.Subcommand(
+ "bpm",
+ help="determine bpm of a song by pressing " "a key to the rhythm",
+ )
cmd.func = self.command
return [cmd]
@@ -67,21 +69,23 @@ def command(self, lib, opts, args):
self.get_bpm(items, write)
def get_bpm(self, items, write=False):
- overwrite = self.config['overwrite'].get(bool)
+ overwrite = self.config["overwrite"].get(bool)
if len(items) > 1:
- raise ValueError('Can only get bpm of one song at time')
+            raise ValueError("Can only get bpm of one song at a time")
item = items[0]
- if item['bpm']:
- self._log.info('Found bpm {0}', item['bpm'])
+ if item["bpm"]:
+ self._log.info("Found bpm {0}", item["bpm"])
if not overwrite:
return
- self._log.info('Press Enter {0} times to the rhythm or Ctrl-D '
- 'to exit', self.config['max_strokes'].get(int))
- new_bpm = bpm(self.config['max_strokes'].get(int))
- item['bpm'] = int(new_bpm)
+ self._log.info(
+ "Press Enter {0} times to the rhythm or Ctrl-D " "to exit",
+ self.config["max_strokes"].get(int),
+ )
+ new_bpm = bpm(self.config["max_strokes"].get(int))
+ item["bpm"] = int(new_bpm)
if write:
item.try_write()
item.store()
- self._log.info('Added new bpm {0}', item['bpm'])
+ self._log.info("Added new bpm {0}", item["bpm"])
diff --git a/beetsplug/bpsync.py b/beetsplug/bpsync.py
index 5b28d6d2b8..4f3e0e907f 100644
--- a/beetsplug/bpsync.py
+++ b/beetsplug/bpsync.py
@@ -15,8 +15,8 @@
"""Update library's tags using Beatport.
"""
-from beets.plugins import BeetsPlugin, apply_item_changes
from beets import autotag, library, ui, util
+from beets.plugins import BeetsPlugin, apply_item_changes
from .beatport import BeatportPlugin
@@ -28,33 +28,33 @@ def __init__(self):
self.beatport_plugin.setup()
def commands(self):
- cmd = ui.Subcommand('bpsync', help='update metadata from Beatport')
+ cmd = ui.Subcommand("bpsync", help="update metadata from Beatport")
cmd.parser.add_option(
- '-p',
- '--pretend',
- action='store_true',
- help='show all changes but do nothing',
+ "-p",
+ "--pretend",
+ action="store_true",
+ help="show all changes but do nothing",
)
cmd.parser.add_option(
- '-m',
- '--move',
- action='store_true',
- dest='move',
+ "-m",
+ "--move",
+ action="store_true",
+ dest="move",
help="move files in the library directory",
)
cmd.parser.add_option(
- '-M',
- '--nomove',
- action='store_false',
- dest='move',
+ "-M",
+ "--nomove",
+ action="store_false",
+ dest="move",
help="don't move files in library",
)
cmd.parser.add_option(
- '-W',
- '--nowrite',
- action='store_false',
+ "-W",
+ "--nowrite",
+ action="store_false",
default=None,
- dest='write',
+ dest="write",
help="don't write updated metadata to files",
)
cmd.parser.add_format_option()
@@ -62,8 +62,7 @@ def commands(self):
return [cmd]
def func(self, lib, opts, args):
- """Command handler for the bpsync function.
- """
+ """Command handler for the bpsync function."""
move = ui.should_move(opts.move)
pretend = opts.pretend
write = ui.should_write(opts.write)
@@ -76,16 +75,16 @@ def singletons(self, lib, query, move, pretend, write):
"""Retrieve and apply info from the autotagger for items matched by
query.
"""
- for item in lib.items(query + ['singleton:true']):
+ for item in lib.items(query + ["singleton:true"]):
if not item.mb_trackid:
self._log.info(
- 'Skipping singleton with no mb_trackid: {}', item
+ "Skipping singleton with no mb_trackid: {}", item
)
continue
if not self.is_beatport_track(item):
self._log.info(
- 'Skipping non-{} singleton: {}',
+ "Skipping non-{} singleton: {}",
self.beatport_plugin.data_source,
item,
)
@@ -100,27 +99,27 @@ def singletons(self, lib, query, move, pretend, write):
@staticmethod
def is_beatport_track(item):
return (
- item.get('data_source') == BeatportPlugin.data_source
+ item.get("data_source") == BeatportPlugin.data_source
and item.mb_trackid.isnumeric()
)
def get_album_tracks(self, album):
if not album.mb_albumid:
- self._log.info('Skipping album with no mb_albumid: {}', album)
+ self._log.info("Skipping album with no mb_albumid: {}", album)
return False
if not album.mb_albumid.isnumeric():
self._log.info(
- 'Skipping album with invalid {} ID: {}',
+ "Skipping album with invalid {} ID: {}",
self.beatport_plugin.data_source,
album,
)
return False
items = list(album.items())
- if album.get('data_source') == self.beatport_plugin.data_source:
+ if album.get("data_source") == self.beatport_plugin.data_source:
return items
if not all(self.is_beatport_track(item) for item in items):
self._log.info(
- 'Skipping non-{} release: {}',
+ "Skipping non-{} release: {}",
self.beatport_plugin.data_source,
album,
)
@@ -142,7 +141,7 @@ def albums(self, lib, query, move, pretend, write):
albuminfo = self.beatport_plugin.album_for_id(album.mb_albumid)
if not albuminfo:
self._log.info(
- 'Release ID {} not found for album {}',
+ "Release ID {} not found for album {}",
album.mb_albumid,
album,
)
@@ -159,7 +158,7 @@ def albums(self, lib, query, move, pretend, write):
for track_id, item in library_trackid_to_item.items()
}
- self._log.info('applying changes to {}', album)
+ self._log.info("applying changes to {}", album)
with lib.transaction():
autotag.apply_metadata(albuminfo, item_to_trackinfo)
changed = False
@@ -182,5 +181,5 @@ def albums(self, lib, query, move, pretend, write):
# Move album art (and any inconsistent items).
if move and lib.directory in util.ancestry(items[0].path):
- self._log.debug('moving album {}', album)
+ self._log.debug("moving album {}", album)
album.move()
diff --git a/beetsplug/bucket.py b/beetsplug/bucket.py
index 9ed50b45c5..59ee080bb1 100644
--- a/beetsplug/bucket.py
+++ b/beetsplug/bucket.py
@@ -16,14 +16,13 @@
"""
-from datetime import datetime
import re
import string
+from datetime import datetime
from itertools import tee
from beets import plugins, ui
-
ASCII_DIGITS = string.digits + string.ascii_lowercase
@@ -39,12 +38,10 @@ def pairwise(iterable):
def span_from_str(span_str):
- """Build a span dict from the span string representation.
- """
+ """Build a span dict from the span string representation."""
def normalize_year(d, yearfrom):
- """Convert string to a 4 digits year
- """
+ """Convert string to a 4 digits year"""
if yearfrom < 100:
raise BucketError("%d must be expressed on 4 digits" % yearfrom)
@@ -57,31 +54,33 @@ def normalize_year(d, yearfrom):
d = (yearfrom - yearfrom % 100) + d
return d
- years = [int(x) for x in re.findall(r'\d+', span_str)]
+ years = [int(x) for x in re.findall(r"\d+", span_str)]
if not years:
- raise ui.UserError("invalid range defined for year bucket '%s': no "
- "year found" % span_str)
+ raise ui.UserError(
+ "invalid range defined for year bucket '%s': no "
+ "year found" % span_str
+ )
try:
years = [normalize_year(x, years[0]) for x in years]
except BucketError as exc:
- raise ui.UserError("invalid range defined for year bucket '%s': %s" %
- (span_str, exc))
+ raise ui.UserError(
+ "invalid range defined for year bucket '%s': %s" % (span_str, exc)
+ )
- res = {'from': years[0], 'str': span_str}
+ res = {"from": years[0], "str": span_str}
if len(years) > 1:
- res['to'] = years[-1]
+ res["to"] = years[-1]
return res
def complete_year_spans(spans):
- """Set the `to` value of spans if empty and sort them chronologically.
- """
- spans.sort(key=lambda x: x['from'])
- for (x, y) in pairwise(spans):
- if 'to' not in x:
- x['to'] = y['from'] - 1
- if spans and 'to' not in spans[-1]:
- spans[-1]['to'] = datetime.now().year
+ """Set the `to` value of spans if empty and sort them chronologically."""
+ spans.sort(key=lambda x: x["from"])
+ for x, y in pairwise(spans):
+ if "to" not in x:
+ x["to"] = y["from"] - 1
+ if spans and "to" not in spans[-1]:
+ spans[-1]["to"] = datetime.now().year
def extend_year_spans(spans, spanlen, start=1900, end=2014):
@@ -89,17 +88,17 @@ def extend_year_spans(spans, spanlen, start=1900, end=2014):
belongs to a span.
"""
extended_spans = spans[:]
- for (x, y) in pairwise(spans):
+ for x, y in pairwise(spans):
# if a gap between two spans, fill the gap with as much spans of
# spanlen length as necessary
- for span_from in range(x['to'] + 1, y['from'], spanlen):
- extended_spans.append({'from': span_from})
+ for span_from in range(x["to"] + 1, y["from"], spanlen):
+ extended_spans.append({"from": span_from})
# Create spans prior to declared ones
- for span_from in range(spans[0]['from'] - spanlen, start, -spanlen):
- extended_spans.append({'from': span_from})
+ for span_from in range(spans[0]["from"] - spanlen, start, -spanlen):
+ extended_spans.append({"from": span_from})
# Create spans after the declared ones
- for span_from in range(spans[-1]['to'] + 1, end, spanlen):
- extended_spans.append({'from': span_from})
+ for span_from in range(spans[-1]["to"] + 1, end, spanlen):
+ extended_spans.append({"from": span_from})
complete_year_spans(extended_spans)
return extended_spans
@@ -117,25 +116,29 @@ def build_year_spans(year_spans_str):
def str2fmt(s):
- """Deduces formatting syntax from a span string.
- """
- regex = re.compile(r"(?P\D*)(?P\d+)(?P\D*)"
- r"(?P\d*)(?P\D*)")
+ """Deduces formatting syntax from a span string."""
+ regex = re.compile(
+ r"(?P\D*)(?P\d+)(?P\D*)"
+ r"(?P\d*)(?P\D*)"
+ )
m = re.match(regex, s)
- res = {'fromnchars': len(m.group('fromyear')),
- 'tonchars': len(m.group('toyear'))}
- res['fmt'] = "{}%s{}{}{}".format(m.group('bef'),
- m.group('sep'),
- '%s' if res['tonchars'] else '',
- m.group('after'))
+ res = {
+ "fromnchars": len(m.group("fromyear")),
+ "tonchars": len(m.group("toyear")),
+ }
+ res["fmt"] = "{}%s{}{}{}".format(
+ m.group("bef"),
+ m.group("sep"),
+ "%s" if res["tonchars"] else "",
+ m.group("after"),
+ )
return res
def format_span(fmt, yearfrom, yearto, fromnchars, tonchars):
- """Return a span string representation.
- """
- args = (str(yearfrom)[-fromnchars:])
+ """Return a span string representation."""
+ args = str(yearfrom)[-fromnchars:]
if tonchars:
args = (str(yearfrom)[-fromnchars:], str(yearto)[-tonchars:])
@@ -143,11 +146,10 @@ def format_span(fmt, yearfrom, yearto, fromnchars, tonchars):
def extract_modes(spans):
- """Extract the most common spans lengths and representation formats
- """
- rangelen = sorted([x['to'] - x['from'] + 1 for x in spans])
+ """Extract the most common spans lengths and representation formats"""
+ rangelen = sorted([x["to"] - x["from"] + 1 for x in spans])
deflen = sorted(rangelen, key=rangelen.count)[-1]
- reprs = [str2fmt(x['str']) for x in spans]
+ reprs = [str2fmt(x["str"]) for x in spans]
deffmt = sorted(reprs, key=reprs.count)[-1]
return deflen, deffmt
@@ -167,13 +169,16 @@ def build_alpha_spans(alpha_spans_str, alpha_regexs):
begin_index = ASCII_DIGITS.index(bucket[0])
end_index = ASCII_DIGITS.index(bucket[-1])
else:
- raise ui.UserError("invalid range defined for alpha bucket "
- "'%s': no alphanumeric character found" %
- elem)
+ raise ui.UserError(
+ "invalid range defined for alpha bucket "
+ "'%s': no alphanumeric character found" % elem
+ )
spans.append(
re.compile(
- "^[" + ASCII_DIGITS[begin_index:end_index + 1] +
- ASCII_DIGITS[begin_index:end_index + 1].upper() + "]"
+ "^["
+ + ASCII_DIGITS[begin_index : end_index + 1]
+ + ASCII_DIGITS[begin_index : end_index + 1].upper()
+ + "]"
)
)
return spans
@@ -182,29 +187,32 @@ def build_alpha_spans(alpha_spans_str, alpha_regexs):
class BucketPlugin(plugins.BeetsPlugin):
def __init__(self):
super().__init__()
- self.template_funcs['bucket'] = self._tmpl_bucket
-
- self.config.add({
- 'bucket_year': [],
- 'bucket_alpha': [],
- 'bucket_alpha_regex': {},
- 'extrapolate': False
- })
+ self.template_funcs["bucket"] = self._tmpl_bucket
+
+ self.config.add(
+ {
+ "bucket_year": [],
+ "bucket_alpha": [],
+ "bucket_alpha_regex": {},
+ "extrapolate": False,
+ }
+ )
self.setup()
def setup(self):
- """Setup plugin from config options
- """
- self.year_spans = build_year_spans(self.config['bucket_year'].get())
- if self.year_spans and self.config['extrapolate']:
- [self.ys_len_mode,
- self.ys_repr_mode] = extract_modes(self.year_spans)
- self.year_spans = extend_year_spans(self.year_spans,
- self.ys_len_mode)
+ """Setup plugin from config options"""
+ self.year_spans = build_year_spans(self.config["bucket_year"].get())
+ if self.year_spans and self.config["extrapolate"]:
+ [self.ys_len_mode, self.ys_repr_mode] = extract_modes(
+ self.year_spans
+ )
+ self.year_spans = extend_year_spans(
+ self.year_spans, self.ys_len_mode
+ )
self.alpha_spans = build_alpha_spans(
- self.config['bucket_alpha'].get(),
- self.config['bucket_alpha_regex'].get()
+ self.config["bucket_alpha"].get(),
+ self.config["bucket_alpha_regex"].get(),
)
def find_bucket_year(self, year):
@@ -212,30 +220,33 @@ def find_bucket_year(self, year):
if no matching bucket.
"""
for ys in self.year_spans:
- if ys['from'] <= int(year) <= ys['to']:
- if 'str' in ys:
- return ys['str']
+ if ys["from"] <= int(year) <= ys["to"]:
+ if "str" in ys:
+ return ys["str"]
else:
- return format_span(self.ys_repr_mode['fmt'],
- ys['from'], ys['to'],
- self.ys_repr_mode['fromnchars'],
- self.ys_repr_mode['tonchars'])
+ return format_span(
+ self.ys_repr_mode["fmt"],
+ ys["from"],
+ ys["to"],
+ self.ys_repr_mode["fromnchars"],
+ self.ys_repr_mode["tonchars"],
+ )
return year
def find_bucket_alpha(self, s):
"""Return alpha-range bucket that matches given string or return the
string initial if no matching bucket.
"""
- for (i, span) in enumerate(self.alpha_spans):
+ for i, span in enumerate(self.alpha_spans):
if span.match(s):
- return self.config['bucket_alpha'].get()[i]
+ return self.config["bucket_alpha"].get()[i]
return s[0].upper()
def _tmpl_bucket(self, text, field=None):
if not field and len(text) == 4 and text.isdigit():
- field = 'year'
+ field = "year"
- if field == 'year':
+ if field == "year":
func = self.find_bucket_year
else:
func = self.find_bucket_alpha
diff --git a/beetsplug/chroma.py b/beetsplug/chroma.py
index 353923aaba..369a3cc731 100644
--- a/beetsplug/chroma.py
+++ b/beetsplug/chroma.py
@@ -16,18 +16,17 @@
autotagger. Requires the pyacoustid library.
"""
-from beets import plugins
-from beets import ui
-from beets import util
-from beets import config
-from beets.autotag import hooks
-import confuse
-import acoustid
+import re
from collections import defaultdict
from functools import partial
-import re
-API_KEY = '1vOwZtEn'
+import acoustid
+import confuse
+
+from beets import config, plugins, ui, util
+from beets.autotag import hooks
+
+API_KEY = "1vOwZtEn"
SCORE_THRESH = 0.5
TRACK_ID_WEIGHT = 10.0
COMMON_REL_THRESH = 0.6 # How many tracks must have an album in common?
@@ -49,8 +48,7 @@
def prefix(it, count):
- """Truncate an iterable to at most `count` items.
- """
+ """Truncate an iterable to at most `count` items."""
for i, v in enumerate(it):
if i >= count:
break
@@ -58,13 +56,12 @@ def prefix(it, count):
def releases_key(release, countries, original_year):
- """Used as a key to sort releases by date then preferred country
- """
- date = release.get('date')
+ """Used as a key to sort releases by date then preferred country"""
+ date = release.get("date")
if date and original_year:
- year = date.get('year', 9999)
- month = date.get('month', 99)
- day = date.get('day', 99)
+ year = date.get("year", 9999)
+ month = date.get("month", 99)
+ day = date.get("day", 99)
else:
year = 9999
month = 99
@@ -72,9 +69,9 @@ def releases_key(release, countries, original_year):
# Uses index of preferred countries to sort
country_key = 99
- if release.get('country'):
+ if release.get("country"):
for i, country in enumerate(countries):
- if country.match(release['country']):
+ if country.match(release["country"]):
country_key = i
break
@@ -88,56 +85,63 @@ def acoustid_match(log, path):
try:
duration, fp = acoustid.fingerprint_file(util.syspath(path))
except acoustid.FingerprintGenerationError as exc:
- log.error('fingerprinting of {0} failed: {1}',
- util.displayable_path(repr(path)), exc)
+ log.error(
+ "fingerprinting of {0} failed: {1}",
+ util.displayable_path(repr(path)),
+ exc,
+ )
return None
fp = fp.decode()
_fingerprints[path] = fp
try:
- res = acoustid.lookup(API_KEY, fp, duration,
- meta='recordings releases')
+ res = acoustid.lookup(API_KEY, fp, duration, meta="recordings releases")
except acoustid.AcoustidError as exc:
- log.debug('fingerprint matching {0} failed: {1}',
- util.displayable_path(repr(path)), exc)
+ log.debug(
+ "fingerprint matching {0} failed: {1}",
+ util.displayable_path(repr(path)),
+ exc,
+ )
return None
- log.debug('chroma: fingerprinted {0}',
- util.displayable_path(repr(path)))
+ log.debug("chroma: fingerprinted {0}", util.displayable_path(repr(path)))
# Ensure the response is usable and parse it.
- if res['status'] != 'ok' or not res.get('results'):
- log.debug('no match found')
+ if res["status"] != "ok" or not res.get("results"):
+ log.debug("no match found")
return None
- result = res['results'][0] # Best match.
- if result['score'] < SCORE_THRESH:
- log.debug('no results above threshold')
+ result = res["results"][0] # Best match.
+ if result["score"] < SCORE_THRESH:
+ log.debug("no results above threshold")
return None
- _acoustids[path] = result['id']
+ _acoustids[path] = result["id"]
# Get recording and releases from the result
- if not result.get('recordings'):
- log.debug('no recordings found')
+ if not result.get("recordings"):
+ log.debug("no recordings found")
return None
recording_ids = []
releases = []
- for recording in result['recordings']:
- recording_ids.append(recording['id'])
- if 'releases' in recording:
- releases.extend(recording['releases'])
+ for recording in result["recordings"]:
+ recording_ids.append(recording["id"])
+ if "releases" in recording:
+ releases.extend(recording["releases"])
# The releases list is essentially in random order from the Acoustid lookup
# so we optionally sort it using the match.preferred configuration options.
# 'original_year' to sort the earliest first and
# 'countries' to then sort preferred countries first.
- country_patterns = config['match']['preferred']['countries'].as_str_seq()
+ country_patterns = config["match"]["preferred"]["countries"].as_str_seq()
countries = [re.compile(pat, re.I) for pat in country_patterns]
- original_year = config['match']['preferred']['original_year']
- releases.sort(key=partial(releases_key,
- countries=countries,
- original_year=original_year))
- release_ids = [rel['id'] for rel in releases]
-
- log.debug('matched recordings {0} on releases {1}',
- recording_ids, release_ids)
+ original_year = config["match"]["preferred"]["original_year"]
+ releases.sort(
+ key=partial(
+ releases_key, countries=countries, original_year=original_year
+ )
+ )
+ release_ids = [rel["id"] for rel in releases]
+
+ log.debug(
+ "matched recordings {0} on releases {1}", recording_ids, release_ids
+ )
_matches[path] = recording_ids, release_ids
@@ -167,14 +171,16 @@ class AcoustidPlugin(plugins.BeetsPlugin):
def __init__(self):
super().__init__()
- self.config.add({
- 'auto': True,
- })
- config['acoustid']['apikey'].redact = True
+ self.config.add(
+ {
+ "auto": True,
+ }
+ )
+ config["acoustid"]["apikey"].redact = True
- if self.config['auto']:
- self.register_listener('import_task_start', self.fingerprint_task)
- self.register_listener('import_task_apply', apply_acoustid_metadata)
+ if self.config["auto"]:
+ self.register_listener("import_task_start", self.fingerprint_task)
+ self.register_listener("import_task_apply", apply_acoustid_metadata)
def fingerprint_task(self, task, session):
return fingerprint_task(self._log, task, session)
@@ -186,7 +192,7 @@ def track_distance(self, item, info):
return dist
recording_ids, _ = _matches[item.path]
- dist.add_expr('track_id', info.track_id not in recording_ids)
+ dist.add_expr("track_id", info.track_id not in recording_ids)
return dist
def candidates(self, items, artist, album, va_likely, extra_tags=None):
@@ -196,7 +202,7 @@ def candidates(self, items, artist, album, va_likely, extra_tags=None):
if album:
albums.append(album)
- self._log.debug('acoustid album candidates: {0}', len(albums))
+ self._log.debug("acoustid album candidates: {0}", len(albums))
return albums
def item_candidates(self, item, artist, title):
@@ -209,29 +215,31 @@ def item_candidates(self, item, artist, title):
track = hooks.track_for_mbid(recording_id)
if track:
tracks.append(track)
- self._log.debug('acoustid item candidates: {0}', len(tracks))
+ self._log.debug("acoustid item candidates: {0}", len(tracks))
return tracks
def commands(self):
- submit_cmd = ui.Subcommand('submit',
- help='submit Acoustid fingerprints')
+ submit_cmd = ui.Subcommand(
+ "submit", help="submit Acoustid fingerprints"
+ )
def submit_cmd_func(lib, opts, args):
try:
- apikey = config['acoustid']['apikey'].as_str()
+ apikey = config["acoustid"]["apikey"].as_str()
except confuse.NotFoundError:
- raise ui.UserError('no Acoustid user API key provided')
+ raise ui.UserError("no Acoustid user API key provided")
submit_items(self._log, apikey, lib.items(ui.decargs(args)))
+
submit_cmd.func = submit_cmd_func
fingerprint_cmd = ui.Subcommand(
- 'fingerprint',
- help='generate fingerprints for items without them'
+ "fingerprint", help="generate fingerprints for items without them"
)
def fingerprint_cmd_func(lib, opts, args):
for item in lib.items(ui.decargs(args)):
fingerprint_item(self._log, item, write=ui.should_write())
+
fingerprint_cmd.func = fingerprint_cmd_func
return [submit_cmd, fingerprint_cmd]
@@ -250,8 +258,7 @@ def fingerprint_task(log, task, session):
def apply_acoustid_metadata(task, session):
- """Apply Acoustid metadata (fingerprint and ID) to the task's items.
- """
+ """Apply Acoustid metadata (fingerprint and ID) to the task's items."""
for item in task.imported_items():
if item.path in _fingerprints:
item.acoustid_fingerprint = _fingerprints[item.path]
@@ -263,17 +270,16 @@ def apply_acoustid_metadata(task, session):
def submit_items(log, userkey, items, chunksize=64):
- """Submit fingerprints for the items to the Acoustid server.
- """
+ """Submit fingerprints for the items to the Acoustid server."""
data = [] # The running list of dictionaries to submit.
def submit_chunk():
"""Submit the current accumulated fingerprint data."""
- log.info('submitting {0} fingerprints', len(data))
+ log.info("submitting {0} fingerprints", len(data))
try:
acoustid.submit(API_KEY, userkey, data)
except acoustid.AcoustidError as exc:
- log.warning('acoustid submission error: {0}', exc)
+ log.warning("acoustid submission error: {0}", exc)
del data[:]
for item in items:
@@ -281,23 +287,25 @@ def submit_chunk():
# Construct a submission dictionary for this item.
item_data = {
- 'duration': int(item.length),
- 'fingerprint': fp,
+ "duration": int(item.length),
+ "fingerprint": fp,
}
if item.mb_trackid:
- item_data['mbid'] = item.mb_trackid
- log.debug('submitting MBID')
+ item_data["mbid"] = item.mb_trackid
+ log.debug("submitting MBID")
else:
- item_data.update({
- 'track': item.title,
- 'artist': item.artist,
- 'album': item.album,
- 'albumartist': item.albumartist,
- 'year': item.year,
- 'trackno': item.track,
- 'discno': item.disc,
- })
- log.debug('submitting textual metadata')
+ item_data.update(
+ {
+ "track": item.title,
+ "artist": item.artist,
+ "album": item.album,
+ "albumartist": item.albumartist,
+ "year": item.year,
+ "trackno": item.track,
+ "discno": item.disc,
+ }
+ )
+ log.debug("submitting textual metadata")
data.append(item_data)
# If we have enough data, submit a chunk.
@@ -318,28 +326,31 @@ def fingerprint_item(log, item, write=False):
"""
# Get a fingerprint and length for this track.
if not item.length:
- log.info('{0}: no duration available',
- util.displayable_path(item.path))
+ log.info("{0}: no duration available", util.displayable_path(item.path))
elif item.acoustid_fingerprint:
if write:
- log.info('{0}: fingerprint exists, skipping',
- util.displayable_path(item.path))
+ log.info(
+ "{0}: fingerprint exists, skipping",
+ util.displayable_path(item.path),
+ )
else:
- log.info('{0}: using existing fingerprint',
- util.displayable_path(item.path))
+ log.info(
+ "{0}: using existing fingerprint",
+ util.displayable_path(item.path),
+ )
return item.acoustid_fingerprint
else:
- log.info('{0}: fingerprinting',
- util.displayable_path(item.path))
+ log.info("{0}: fingerprinting", util.displayable_path(item.path))
try:
_, fp = acoustid.fingerprint_file(util.syspath(item.path))
item.acoustid_fingerprint = fp.decode()
if write:
- log.info('{0}: writing fingerprint',
- util.displayable_path(item.path))
+ log.info(
+ "{0}: writing fingerprint", util.displayable_path(item.path)
+ )
item.try_write()
if item._db:
item.store()
return item.acoustid_fingerprint
except acoustid.FingerprintGenerationError as exc:
- log.info('fingerprint generation failed: {0}', exc)
+ log.info("fingerprint generation failed: {0}", exc)
diff --git a/beetsplug/convert.py b/beetsplug/convert.py
index e47182d92b..51312d77b4 100644
--- a/beetsplug/convert.py
+++ b/beetsplug/convert.py
@@ -14,23 +14,21 @@
"""Converts tracks or albums to external directory
"""
-from beets.util import par_map, arg_encoding
-
+import logging
import os
-import threading
+import shlex
import subprocess
import tempfile
-import shlex
+import threading
from string import Template
-import logging
-from beets import ui, util, plugins, config
-from beets.plugins import BeetsPlugin
from confuse import ConfigTypeError, Optional
-from beets import art
+
+from beets import art, config, plugins, ui, util
+from beets.library import Item, parse_query_string
+from beets.plugins import BeetsPlugin
+from beets.util import arg_encoding, par_map
from beets.util.artresizer import ArtResizer
-from beets.library import parse_query_string
-from beets.library import Item
from beets.util.m3u import M3UFile
_fs_lock = threading.Lock()
@@ -38,11 +36,11 @@
# Some convenient alternate names for formats.
ALIASES = {
- 'windows media': 'wma',
- 'vorbis': 'ogg',
+ "windows media": "wma",
+ "vorbis": "ogg",
}
-LOSSLESS_FORMATS = ['ape', 'flac', 'alac', 'wav', 'aiff']
+LOSSLESS_FORMATS = ["ape", "flac", "alac", "wav", "aiff"]
def replace_ext(path, ext):
@@ -50,59 +48,58 @@ def replace_ext(path, ext):
The new extension must not contain a leading dot.
"""
- ext_dot = b'.' + ext
+ ext_dot = b"." + ext
return os.path.splitext(path)[0] + ext_dot
def get_format(fmt=None):
- """Return the command template and the extension from the config.
- """
+ """Return the command template and the extension from the config."""
if not fmt:
- fmt = config['convert']['format'].as_str().lower()
+ fmt = config["convert"]["format"].as_str().lower()
fmt = ALIASES.get(fmt, fmt)
try:
- format_info = config['convert']['formats'][fmt].get(dict)
- command = format_info['command']
- extension = format_info.get('extension', fmt)
+ format_info = config["convert"]["formats"][fmt].get(dict)
+ command = format_info["command"]
+ extension = format_info.get("extension", fmt)
except KeyError:
raise ui.UserError(
- 'convert: format {} needs the "command" field'
- .format(fmt)
+ 'convert: format {} needs the "command" field'.format(fmt)
)
except ConfigTypeError:
- command = config['convert']['formats'][fmt].get(str)
+ command = config["convert"]["formats"][fmt].get(str)
extension = fmt
# Convenience and backwards-compatibility shortcuts.
- keys = config['convert'].keys()
- if 'command' in keys:
- command = config['convert']['command'].as_str()
- elif 'opts' in keys:
+ keys = config["convert"].keys()
+ if "command" in keys:
+ command = config["convert"]["command"].as_str()
+ elif "opts" in keys:
# Undocumented option for backwards compatibility with < 1.3.1.
- command = 'ffmpeg -i $source -y {} $dest'.format(
- config['convert']['opts'].as_str()
+ command = "ffmpeg -i $source -y {} $dest".format(
+ config["convert"]["opts"].as_str()
)
- if 'extension' in keys:
- extension = config['convert']['extension'].as_str()
+ if "extension" in keys:
+ extension = config["convert"]["extension"].as_str()
- return (command.encode('utf-8'), extension.encode('utf-8'))
+ return (command.encode("utf-8"), extension.encode("utf-8"))
def should_transcode(item, fmt):
"""Determine whether the item should be transcoded as part of
conversion (i.e., its bitrate is high or it has the wrong format).
"""
- no_convert_queries = config['convert']['no_convert'].as_str_seq()
+ no_convert_queries = config["convert"]["no_convert"].as_str_seq()
if no_convert_queries:
for query_string in no_convert_queries:
query, _ = parse_query_string(query_string, Item)
if query.match(item):
return False
- if config['convert']['never_convert_lossy_files'] and \
- not (item.format.lower() in LOSSLESS_FORMATS):
+ if config["convert"]["never_convert_lossy_files"] and not (
+ item.format.lower() in LOSSLESS_FORMATS
+ ):
return False
- maxbr = config['convert']['max_bitrate'].get(Optional(int))
+ maxbr = config["convert"]["max_bitrate"].get(Optional(int))
if maxbr is not None and item.bitrate >= 1000 * maxbr:
return True
return fmt.lower() != item.format.lower()
@@ -111,101 +108,157 @@ def should_transcode(item, fmt):
class ConvertPlugin(BeetsPlugin):
def __init__(self):
super().__init__()
- self.config.add({
- 'dest': None,
- 'pretend': False,
- 'link': False,
- 'hardlink': False,
- 'threads': util.cpu_count(),
- 'format': 'mp3',
- 'id3v23': 'inherit',
- 'formats': {
- 'aac': {
- 'command': 'ffmpeg -i $source -y -vn -acodec aac '
- '-aq 1 $dest',
- 'extension': 'm4a',
- },
- 'alac': {
- 'command': 'ffmpeg -i $source -y -vn -acodec alac $dest',
- 'extension': 'm4a',
+ self.config.add(
+ {
+ "dest": None,
+ "pretend": False,
+ "link": False,
+ "hardlink": False,
+ "threads": util.cpu_count(),
+ "format": "mp3",
+ "id3v23": "inherit",
+ "formats": {
+ "aac": {
+ "command": "ffmpeg -i $source -y -vn -acodec aac "
+ "-aq 1 $dest",
+ "extension": "m4a",
+ },
+ "alac": {
+ "command": "ffmpeg -i $source -y -vn -acodec alac $dest",
+ "extension": "m4a",
+ },
+ "flac": "ffmpeg -i $source -y -vn -acodec flac $dest",
+ "mp3": "ffmpeg -i $source -y -vn -aq 2 $dest",
+ "opus": "ffmpeg -i $source -y -vn -acodec libopus -ab 96k $dest",
+ "ogg": "ffmpeg -i $source -y -vn -acodec libvorbis -aq 3 $dest",
+ "wma": "ffmpeg -i $source -y -vn -acodec wmav2 -vn $dest",
},
- 'flac': 'ffmpeg -i $source -y -vn -acodec flac $dest',
- 'mp3': 'ffmpeg -i $source -y -vn -aq 2 $dest',
- 'opus':
- 'ffmpeg -i $source -y -vn -acodec libopus -ab 96k $dest',
- 'ogg':
- 'ffmpeg -i $source -y -vn -acodec libvorbis -aq 3 $dest',
- 'wma':
- 'ffmpeg -i $source -y -vn -acodec wmav2 -vn $dest',
- },
- 'max_bitrate': None,
- 'auto': False,
- 'auto_keep': False,
- 'tmpdir': None,
- 'quiet': False,
- 'embed': True,
- 'paths': {},
- 'no_convert': '',
- 'never_convert_lossy_files': False,
- 'copy_album_art': False,
- 'album_art_maxwidth': 0,
- 'delete_originals': False,
- 'playlist': None,
- })
+ "max_bitrate": None,
+ "auto": False,
+ "auto_keep": False,
+ "tmpdir": None,
+ "quiet": False,
+ "embed": True,
+ "paths": {},
+ "no_convert": "",
+ "never_convert_lossy_files": False,
+ "copy_album_art": False,
+ "album_art_maxwidth": 0,
+ "delete_originals": False,
+ "playlist": None,
+ }
+ )
self.early_import_stages = [self.auto_convert, self.auto_convert_keep]
- self.register_listener('import_task_files', self._cleanup)
+ self.register_listener("import_task_files", self._cleanup)
def commands(self):
- cmd = ui.Subcommand('convert', help='convert to external location')
- cmd.parser.add_option('-p', '--pretend', action='store_true',
- help='show actions but do nothing')
- cmd.parser.add_option('-t', '--threads', action='store', type='int',
- help='change the number of threads, \
- defaults to maximum available processors')
- cmd.parser.add_option('-k', '--keep-new', action='store_true',
- dest='keep_new', help='keep only the converted \
- and move the old files')
- cmd.parser.add_option('-d', '--dest', action='store',
- help='set the destination directory')
- cmd.parser.add_option('-f', '--format', action='store', dest='format',
- help='set the target format of the tracks')
- cmd.parser.add_option('-y', '--yes', action='store_true', dest='yes',
- help='do not ask for confirmation')
- cmd.parser.add_option('-l', '--link', action='store_true', dest='link',
- help='symlink files that do not \
- need transcoding.')
- cmd.parser.add_option('-H', '--hardlink', action='store_true',
- dest='hardlink',
- help='hardlink files that do not \
- need transcoding. Overrides --link.')
- cmd.parser.add_option('-m', '--playlist', action='store',
- help='''create an m3u8 playlist file containing
+ cmd = ui.Subcommand("convert", help="convert to external location")
+ cmd.parser.add_option(
+ "-p",
+ "--pretend",
+ action="store_true",
+ help="show actions but do nothing",
+ )
+ cmd.parser.add_option(
+ "-t",
+ "--threads",
+ action="store",
+ type="int",
+ help="change the number of threads, \
+ defaults to maximum available processors",
+ )
+ cmd.parser.add_option(
+ "-k",
+ "--keep-new",
+ action="store_true",
+ dest="keep_new",
+ help="keep only the converted \
+ and move the old files",
+ )
+ cmd.parser.add_option(
+ "-d", "--dest", action="store", help="set the destination directory"
+ )
+ cmd.parser.add_option(
+ "-f",
+ "--format",
+ action="store",
+ dest="format",
+ help="set the target format of the tracks",
+ )
+ cmd.parser.add_option(
+ "-y",
+ "--yes",
+ action="store_true",
+ dest="yes",
+ help="do not ask for confirmation",
+ )
+ cmd.parser.add_option(
+ "-l",
+ "--link",
+ action="store_true",
+ dest="link",
+ help="symlink files that do not \
+ need transcoding.",
+ )
+ cmd.parser.add_option(
+ "-H",
+ "--hardlink",
+ action="store_true",
+ dest="hardlink",
+ help="hardlink files that do not \
+ need transcoding. Overrides --link.",
+ )
+ cmd.parser.add_option(
+ "-m",
+ "--playlist",
+ action="store",
+ help="""create an m3u8 playlist file containing
the converted files. The playlist file will be
saved below the destination directory, thus
PLAYLIST could be a file name or a relative path.
To ensure a working playlist when transferred to
a different computer, or opened from an external
drive, relative paths pointing to media files
- will be used.''')
+ will be used.""",
+ )
cmd.parser.add_album_option()
cmd.func = self.convert_func
return [cmd]
def auto_convert(self, config, task):
- if self.config['auto']:
- par_map(lambda item: self.convert_on_import(config.lib, item),
- task.imported_items())
+ if self.config["auto"]:
+ par_map(
+ lambda item: self.convert_on_import(config.lib, item),
+ task.imported_items(),
+ )
def auto_convert_keep(self, config, task):
- if self.config['auto_keep']:
+ if self.config["auto_keep"]:
empty_opts = self.commands()[0].parser.get_default_values()
- (dest, threads, path_formats, fmt, pretend,
- hardlink, link, playlist) = self._get_opts_and_config(empty_opts)
+ (
+ dest,
+ threads,
+ path_formats,
+ fmt,
+ pretend,
+ hardlink,
+ link,
+ playlist,
+ ) = self._get_opts_and_config(empty_opts)
items = task.imported_items()
- self._parallel_convert(dest, False, path_formats, fmt,
- pretend, link, hardlink, threads, items)
+ self._parallel_convert(
+ dest,
+ False,
+ path_formats,
+ fmt,
+ pretend,
+ link,
+ hardlink,
+ threads,
+ items,
+ )
# Utilities converted from functions to methods on logging overhaul
@@ -220,12 +273,12 @@ def encode(self, command, source, dest, pretend=False):
assert isinstance(source, bytes)
assert isinstance(dest, bytes)
- quiet = self.config['quiet'].get(bool)
+ quiet = self.config["quiet"].get(bool)
if not quiet and not pretend:
- self._log.info('Encoding {0}', util.displayable_path(source))
+ self._log.info("Encoding {0}", util.displayable_path(source))
- command = command.decode(arg_encoding(), 'surrogateescape')
+ command = command.decode(arg_encoding(), "surrogateescape")
source = os.fsdecode(source)
dest = os.fsdecode(dest)
@@ -233,42 +286,57 @@ def encode(self, command, source, dest, pretend=False):
args = shlex.split(command)
encode_cmd = []
for i, arg in enumerate(args):
- args[i] = Template(arg).safe_substitute({
- 'source': source,
- 'dest': dest,
- })
+ args[i] = Template(arg).safe_substitute(
+ {
+ "source": source,
+ "dest": dest,
+ }
+ )
encode_cmd.append(args[i].encode(util.arg_encoding()))
if pretend:
- self._log.info('{0}', ' '.join(ui.decargs(args)))
+ self._log.info("{0}", " ".join(ui.decargs(args)))
return
try:
util.command_output(encode_cmd)
except subprocess.CalledProcessError as exc:
# Something went wrong (probably Ctrl+C), remove temporary files
- self._log.info('Encoding {0} failed. Cleaning up...',
- util.displayable_path(source))
- self._log.debug('Command {0} exited with status {1}: {2}',
- args,
- exc.returncode,
- exc.output)
+ self._log.info(
+ "Encoding {0} failed. Cleaning up...",
+ util.displayable_path(source),
+ )
+ self._log.debug(
+ "Command {0} exited with status {1}: {2}",
+ args,
+ exc.returncode,
+ exc.output,
+ )
util.remove(dest)
util.prune_dirs(os.path.dirname(dest))
raise
except OSError as exc:
raise ui.UserError(
"convert: couldn't invoke '{}': {}".format(
- ' '.join(ui.decargs(args)), exc
+ " ".join(ui.decargs(args)), exc
)
)
if not quiet and not pretend:
- self._log.info('Finished encoding {0}',
- util.displayable_path(source))
+ self._log.info(
+ "Finished encoding {0}", util.displayable_path(source)
+ )
- def convert_item(self, dest_dir, keep_new, path_formats, fmt,
- pretend=False, link=False, hardlink=False):
+ def convert_item(
+ self,
+ dest_dir,
+ keep_new,
+ path_formats,
+ fmt,
+ pretend=False,
+ link=False,
+ hardlink=False,
+ ):
"""A pipeline thread that converts `Item` objects from a
library.
"""
@@ -276,8 +344,7 @@ def convert_item(self, dest_dir, keep_new, path_formats, fmt,
item, original, converted = None, None, None
while True:
item = yield (item, original, converted)
- dest = item.destination(basedir=dest_dir,
- path_formats=path_formats)
+ dest = item.destination(basedir=dest_dir, path_formats=path_formats)
# When keeping the new file in the library, we first move the
# current (pristine) file to the destination. We'll then copy it
@@ -301,18 +368,23 @@ def convert_item(self, dest_dir, keep_new, path_formats, fmt,
util.mkdirall(dest)
if os.path.exists(util.syspath(dest)):
- self._log.info('Skipping {0} (target file exists)',
- util.displayable_path(item.path))
+ self._log.info(
+ "Skipping {0} (target file exists)",
+ util.displayable_path(item.path),
+ )
continue
if keep_new:
if pretend:
- self._log.info('mv {0} {1}',
- util.displayable_path(item.path),
- util.displayable_path(original))
+ self._log.info(
+ "mv {0} {1}",
+ util.displayable_path(item.path),
+ util.displayable_path(original),
+ )
else:
- self._log.info('Moving to {0}',
- util.displayable_path(original))
+ self._log.info(
+ "Moving to {0}", util.displayable_path(original)
+ )
util.move(item.path, original)
if should_transcode(item, fmt):
@@ -324,20 +396,25 @@ def convert_item(self, dest_dir, keep_new, path_formats, fmt,
else:
linked = link or hardlink
if pretend:
- msg = 'ln' if hardlink else ('ln -s' if link else 'cp')
-
- self._log.info('{2} {0} {1}',
- util.displayable_path(original),
- util.displayable_path(converted),
- msg)
+ msg = "ln" if hardlink else ("ln -s" if link else "cp")
+
+ self._log.info(
+ "{2} {0} {1}",
+ util.displayable_path(original),
+ util.displayable_path(converted),
+ msg,
+ )
else:
# No transcoding necessary.
- msg = 'Hardlinking' if hardlink \
- else ('Linking' if link else 'Copying')
+ msg = (
+ "Hardlinking"
+ if hardlink
+ else ("Linking" if link else "Copying")
+ )
- self._log.info('{1} {0}',
- util.displayable_path(item.path),
- msg)
+ self._log.info(
+ "{1} {0}", util.displayable_path(item.path), msg
+ )
if hardlink:
util.hardlink(original, converted)
@@ -349,8 +426,8 @@ def convert_item(self, dest_dir, keep_new, path_formats, fmt,
if pretend:
continue
- id3v23 = self.config['id3v23'].as_choice([True, False, 'inherit'])
- if id3v23 == 'inherit':
+ id3v23 = self.config["id3v23"].as_choice([True, False, "inherit"])
+ if id3v23 == "inherit":
id3v23 = None
# Write tags from the database to the converted file.
@@ -363,24 +440,41 @@ def convert_item(self, dest_dir, keep_new, path_formats, fmt,
item.read()
item.store() # Store new path and audio data.
- if self.config['embed'] and not linked:
+ if self.config["embed"] and not linked:
album = item._cached_album
if album and album.artpath:
maxwidth = self._get_art_resize(album.artpath)
- self._log.debug('embedding album art from {}',
- util.displayable_path(album.artpath))
- art.embed_item(self._log, item, album.artpath, maxwidth,
- itempath=converted, id3v23=id3v23)
+ self._log.debug(
+ "embedding album art from {}",
+ util.displayable_path(album.artpath),
+ )
+ art.embed_item(
+ self._log,
+ item,
+ album.artpath,
+ maxwidth,
+ itempath=converted,
+ id3v23=id3v23,
+ )
if keep_new:
- plugins.send('after_convert', item=item,
- dest=dest, keepnew=True)
+ plugins.send(
+ "after_convert", item=item, dest=dest, keepnew=True
+ )
else:
- plugins.send('after_convert', item=item,
- dest=converted, keepnew=False)
+ plugins.send(
+ "after_convert", item=item, dest=converted, keepnew=False
+ )
- def copy_album_art(self, album, dest_dir, path_formats, pretend=False,
- link=False, hardlink=False):
+ def copy_album_art(
+ self,
+ album,
+ dest_dir,
+ path_formats,
+ pretend=False,
+ link=False,
+ hardlink=False,
+ ):
"""Copies or converts the associated cover art of the album. Album must
have at least one track.
"""
@@ -394,8 +488,9 @@ def copy_album_art(self, album, dest_dir, path_formats, pretend=False,
# Get the destination of the first item (track) of the album, we use
# this function to format the path accordingly to path_formats.
- dest = album_item.destination(basedir=dest_dir,
- path_formats=path_formats)
+ dest = album_item.destination(
+ basedir=dest_dir, path_formats=path_formats
+ )
# Remove item from the path.
dest = os.path.join(*util.components(dest)[:-1])
@@ -408,8 +503,10 @@ def copy_album_art(self, album, dest_dir, path_formats, pretend=False,
util.mkdirall(dest)
if os.path.exists(util.syspath(dest)):
- self._log.info('Skipping {0} (target file exists)',
- util.displayable_path(album.artpath))
+ self._log.info(
+ "Skipping {0} (target file exists)",
+ util.displayable_path(album.artpath),
+ )
return
# Decide whether we need to resize the cover-art image.
@@ -417,27 +514,36 @@ def copy_album_art(self, album, dest_dir, path_formats, pretend=False,
# Either copy or resize (while copying) the image.
if maxwidth is not None:
- self._log.info('Resizing cover art from {0} to {1}',
- util.displayable_path(album.artpath),
- util.displayable_path(dest))
+ self._log.info(
+ "Resizing cover art from {0} to {1}",
+ util.displayable_path(album.artpath),
+ util.displayable_path(dest),
+ )
if not pretend:
ArtResizer.shared.resize(maxwidth, album.artpath, dest)
else:
if pretend:
- msg = 'ln' if hardlink else ('ln -s' if link else 'cp')
+ msg = "ln" if hardlink else ("ln -s" if link else "cp")
- self._log.info('{2} {0} {1}',
- util.displayable_path(album.artpath),
- util.displayable_path(dest),
- msg)
+ self._log.info(
+ "{2} {0} {1}",
+ util.displayable_path(album.artpath),
+ util.displayable_path(dest),
+ msg,
+ )
else:
- msg = 'Hardlinking' if hardlink \
- else ('Linking' if link else 'Copying')
+ msg = (
+ "Hardlinking"
+ if hardlink
+ else ("Linking" if link else "Copying")
+ )
- self._log.info('{2} cover art from {0} to {1}',
- util.displayable_path(album.artpath),
- util.displayable_path(dest),
- msg)
+ self._log.info(
+ "{2} cover art from {0} to {1}",
+ util.displayable_path(album.artpath),
+ util.displayable_path(dest),
+ msg,
+ )
if hardlink:
util.hardlink(album.artpath, dest)
elif link:
@@ -446,34 +552,52 @@ def copy_album_art(self, album, dest_dir, path_formats, pretend=False,
util.copy(album.artpath, dest)
def convert_func(self, lib, opts, args):
- (dest, threads, path_formats, fmt,
- pretend, hardlink, link, playlist) = self._get_opts_and_config(opts)
+ (
+ dest,
+ threads,
+ path_formats,
+ fmt,
+ pretend,
+ hardlink,
+ link,
+ playlist,
+ ) = self._get_opts_and_config(opts)
if opts.album:
albums = lib.albums(ui.decargs(args))
items = [i for a in albums for i in a.items()]
if not pretend:
for a in albums:
- ui.print_(format(a, ''))
+ ui.print_(format(a, ""))
else:
items = list(lib.items(ui.decargs(args)))
if not pretend:
for i in items:
- ui.print_(format(i, ''))
+ ui.print_(format(i, ""))
if not items:
- self._log.error('Empty query result.')
+ self._log.error("Empty query result.")
return
if not (pretend or opts.yes or ui.input_yn("Convert? (Y/n)")):
return
- if opts.album and self.config['copy_album_art']:
+ if opts.album and self.config["copy_album_art"]:
for album in albums:
- self.copy_album_art(album, dest, path_formats, pretend,
- link, hardlink)
+ self.copy_album_art(
+ album, dest, path_formats, pretend, link, hardlink
+ )
- self._parallel_convert(dest, opts.keep_new, path_formats, fmt, pretend,
- link, hardlink, threads, items)
+ self._parallel_convert(
+ dest,
+ opts.keep_new,
+ path_formats,
+ fmt,
+ pretend,
+ link,
+ hardlink,
+ threads,
+ items,
+ )
if playlist:
# Playlist paths are understood as relative to the dest directory.
@@ -484,9 +608,17 @@ def convert_func(self, lib, opts, args):
# relative to the playlist's location and translates the unicode
# strings we get from item.destination to bytes.
items_paths = [
- os.path.relpath(util.bytestring_path(item.destination(
- basedir=dest, path_formats=path_formats, fragment=False
- )), pl_dir) for item in items
+ os.path.relpath(
+ util.bytestring_path(
+ item.destination(
+ basedir=dest,
+ path_formats=path_formats,
+ fragment=False,
+ )
+ ),
+ pl_dir,
+ )
+ for item in items
]
if not pretend:
m3ufile = M3UFile(playlist)
@@ -497,15 +629,15 @@ def convert_on_import(self, lib, item):
"""Transcode a file automatically after it is imported into the
library.
"""
- fmt = self.config['format'].as_str().lower()
+ fmt = self.config["format"].as_str().lower()
if should_transcode(item, fmt):
command, ext = get_format()
# Create a temporary file for the conversion.
- tmpdir = self.config['tmpdir'].get()
+ tmpdir = self.config["tmpdir"].get()
if tmpdir:
tmpdir = util.py3_path(util.bytestring_path(tmpdir))
- fd, dest = tempfile.mkstemp(util.py3_path(b'.' + ext), dir=tmpdir)
+ fd, dest = tempfile.mkstemp(util.py3_path(b"." + ext), dir=tmpdir)
os.close(fd)
dest = util.bytestring_path(dest)
_temp_files.append(dest) # Delete the transcode later.
@@ -524,10 +656,10 @@ def convert_on_import(self, lib, item):
item.read() # Load new audio information data.
item.store()
- if self.config['delete_originals']:
+ if self.config["delete_originals"]:
self._log.log(
- logging.DEBUG if self.config['quiet'] else logging.INFO,
- 'Removing original file {0}',
+ logging.DEBUG if self.config["quiet"] else logging.INFO,
+ "Removing original file {0}",
source_path,
)
util.remove(source_path, False)
@@ -538,16 +670,18 @@ def _get_art_resize(self, artpath):
new size. If not, returns None.
"""
newwidth = None
- if self.config['album_art_maxwidth']:
- maxwidth = self.config['album_art_maxwidth'].get(int)
+ if self.config["album_art_maxwidth"]:
+ maxwidth = self.config["album_art_maxwidth"].get(int)
size = ArtResizer.shared.get_size(artpath)
- self._log.debug('image size: {}', size)
+ self._log.debug("image size: {}", size)
if size:
if size[0] > maxwidth:
newwidth = maxwidth
else:
- self._log.warning('Could not get size of image (please see '
- 'documentation for dependencies).')
+ self._log.warning(
+ "Could not get size of image (please see "
+ "documentation for dependencies)."
+ )
return newwidth
def _cleanup(self, task, session):
@@ -562,25 +696,25 @@ def _get_opts_and_config(self, opts):
Get parameters from command line if available,
default to config if not available.
"""
- dest = opts.dest or self.config['dest'].get()
+ dest = opts.dest or self.config["dest"].get()
if not dest:
- raise ui.UserError('no convert destination set')
+ raise ui.UserError("no convert destination set")
dest = util.bytestring_path(dest)
- threads = opts.threads or self.config['threads'].get(int)
+ threads = opts.threads or self.config["threads"].get(int)
- path_formats = ui.get_path_formats(self.config['paths'] or None)
+ path_formats = ui.get_path_formats(self.config["paths"] or None)
- fmt = opts.format or self.config['format'].as_str().lower()
+ fmt = opts.format or self.config["format"].as_str().lower()
- playlist = opts.playlist or self.config['playlist'].get()
+ playlist = opts.playlist or self.config["playlist"].get()
if playlist is not None:
playlist = os.path.join(dest, util.bytestring_path(playlist))
if opts.pretend is not None:
pretend = opts.pretend
else:
- pretend = self.config['pretend'].get(bool)
+ pretend = self.config["pretend"].get(bool)
if opts.hardlink is not None:
hardlink = opts.hardlink
@@ -589,24 +723,40 @@ def _get_opts_and_config(self, opts):
hardlink = False
link = opts.link
else:
- hardlink = self.config['hardlink'].get(bool)
- link = self.config['link'].get(bool)
-
- return (dest, threads, path_formats, fmt, pretend, hardlink, link,
- playlist)
+ hardlink = self.config["hardlink"].get(bool)
+ link = self.config["link"].get(bool)
+
+ return (
+ dest,
+ threads,
+ path_formats,
+ fmt,
+ pretend,
+ hardlink,
+ link,
+ playlist,
+ )
- def _parallel_convert(self, dest, keep_new, path_formats, fmt,
- pretend, link, hardlink, threads, items):
+ def _parallel_convert(
+ self,
+ dest,
+ keep_new,
+ path_formats,
+ fmt,
+ pretend,
+ link,
+ hardlink,
+ threads,
+ items,
+ ):
"""Run the convert_item function for every items on as many thread as
defined in threads
"""
- convert = [self.convert_item(dest,
- keep_new,
- path_formats,
- fmt,
- pretend,
- link,
- hardlink)
- for _ in range(threads)]
+ convert = [
+ self.convert_item(
+ dest, keep_new, path_formats, fmt, pretend, link, hardlink
+ )
+ for _ in range(threads)
+ ]
pipe = util.pipeline.Pipeline([iter(items), convert])
pipe.run_parallel()
diff --git a/beetsplug/deezer.py b/beetsplug/deezer.py
index e60b949644..4c70d841ec 100644
--- a/beetsplug/deezer.py
+++ b/beetsplug/deezer.py
@@ -30,19 +30,19 @@
class DeezerPlugin(MetadataSourcePlugin, BeetsPlugin):
- data_source = 'Deezer'
+ data_source = "Deezer"
item_types = {
- 'deezer_track_rank': types.INTEGER,
- 'deezer_track_id': types.INTEGER,
- 'deezer_updated': DateType(),
+ "deezer_track_rank": types.INTEGER,
+ "deezer_track_id": types.INTEGER,
+ "deezer_updated": DateType(),
}
# Base URLs for the Deezer API
# Documentation: https://developers.deezer.com/api/
- search_url = 'https://api.deezer.com/search/'
- album_url = 'https://api.deezer.com/album/'
- track_url = 'https://api.deezer.com/track/'
+ search_url = "https://api.deezer.com/search/"
+ album_url = "https://api.deezer.com/album/"
+ track_url = "https://api.deezer.com/track/"
id_regex = deezer_id_regex
@@ -52,7 +52,8 @@ def __init__(self):
def commands(self):
"""Add beet UI commands to interact with Deezer."""
deezer_update_cmd = ui.Subcommand(
- 'deezerupdate', help=f'Update {self.data_source} rank')
+ "deezerupdate", help=f"Update {self.data_source} rank"
+ )
def func(lib, opts, args):
items = lib.items(ui.decargs(args))
@@ -71,23 +72,25 @@ def album_for_id(self, album_id):
:return: AlbumInfo object for album.
:rtype: beets.autotag.hooks.AlbumInfo or None
"""
- deezer_id = self._get_id('album', album_id, self.id_regex)
+ deezer_id = self._get_id("album", album_id, self.id_regex)
if deezer_id is None:
return None
album_data = requests.get(self.album_url + deezer_id).json()
- if 'error' in album_data:
- self._log.debug(f"Error fetching album {album_id}: "
- f"{album_data['error']['message']}")
+ if "error" in album_data:
+ self._log.debug(
+ f"Error fetching album {album_id}: "
+ f"{album_data['error']['message']}"
+ )
return None
- contributors = album_data.get('contributors')
+ contributors = album_data.get("contributors")
if contributors is not None:
artist, artist_id = self.get_artist(contributors)
else:
artist, artist_id = None, None
- release_date = album_data['release_date']
- date_parts = [int(part) for part in release_date.split('-')]
+ release_date = album_data["release_date"]
+ date_parts = [int(part) for part in release_date.split("-")]
num_date_parts = len(date_parts)
if num_date_parts == 3:
@@ -105,15 +108,13 @@ def album_for_id(self, album_id):
"by {} API: '{}'".format(self.data_source, release_date)
)
- tracks_obj = requests.get(
- self.album_url + deezer_id + '/tracks'
- ).json()
- tracks_data = tracks_obj['data']
+ tracks_obj = requests.get(self.album_url + deezer_id + "/tracks").json()
+ tracks_data = tracks_obj["data"]
if not tracks_data:
return None
while "next" in tracks_obj:
- tracks_obj = requests.get(tracks_obj['next']).json()
- tracks_data.extend(tracks_obj['data'])
+ tracks_obj = requests.get(tracks_obj["next"]).json()
+ tracks_data.extend(tracks_obj["data"])
tracks = []
medium_totals = collections.defaultdict(int)
@@ -126,24 +127,24 @@ def album_for_id(self, album_id):
track.medium_total = medium_totals[track.medium]
return AlbumInfo(
- album=album_data['title'],
+ album=album_data["title"],
album_id=deezer_id,
deezer_album_id=deezer_id,
artist=artist,
- artist_credit=self.get_artist([album_data['artist']])[0],
+ artist_credit=self.get_artist([album_data["artist"]])[0],
artist_id=artist_id,
tracks=tracks,
- albumtype=album_data['record_type'],
- va=len(album_data['contributors']) == 1
- and artist.lower() == 'various artists',
+ albumtype=album_data["record_type"],
+ va=len(album_data["contributors"]) == 1
+ and artist.lower() == "various artists",
year=year,
month=month,
day=day,
- label=album_data['label'],
+ label=album_data["label"],
mediums=max(medium_totals.keys()),
data_source=self.data_source,
- data_url=album_data['link'],
- cover_art_url=album_data.get('cover_xl'),
+ data_url=album_data["link"],
+ cover_art_url=album_data.get("cover_xl"),
)
def _get_track(self, track_data):
@@ -155,22 +156,22 @@ def _get_track(self, track_data):
:rtype: beets.autotag.hooks.TrackInfo
"""
artist, artist_id = self.get_artist(
- track_data.get('contributors', [track_data['artist']])
+ track_data.get("contributors", [track_data["artist"]])
)
return TrackInfo(
- title=track_data['title'],
- track_id=track_data['id'],
- deezer_track_id=track_data['id'],
- isrc=track_data.get('isrc'),
+ title=track_data["title"],
+ track_id=track_data["id"],
+ deezer_track_id=track_data["id"],
+ isrc=track_data.get("isrc"),
artist=artist,
artist_id=artist_id,
- length=track_data['duration'],
- index=track_data.get('track_position'),
- medium=track_data.get('disk_number'),
- deezer_track_rank=track_data.get('rank'),
- medium_index=track_data.get('track_position'),
+ length=track_data["duration"],
+ index=track_data.get("track_position"),
+ medium=track_data.get("disk_number"),
+ deezer_track_rank=track_data.get("rank"),
+ medium_index=track_data.get("track_position"),
data_source=self.data_source,
- data_url=track_data['link'],
+ data_url=track_data["link"],
deezer_updated=time.time(),
)
@@ -188,13 +189,15 @@ def track_for_id(self, track_id=None, track_data=None):
:rtype: beets.autotag.hooks.TrackInfo or None
"""
if track_data is None:
- deezer_id = self._get_id('track', track_id, self.id_regex)
+ deezer_id = self._get_id("track", track_id, self.id_regex)
if deezer_id is None:
return None
track_data = requests.get(self.track_url + deezer_id).json()
- if 'error' in track_data:
- self._log.debug(f"Error fetching track {track_id}: "
- f"{track_data['error']['message']}")
+ if "error" in track_data:
+ self._log.debug(
+ f"Error fetching track {track_id}: "
+ f"{track_data['error']['message']}"
+ )
return None
track = self._get_track(track_data)
@@ -202,19 +205,19 @@ def track_for_id(self, track_id=None, track_data=None):
# release) and `track.medium_total` (total number of tracks on
# the track's disc).
album_tracks_data = requests.get(
- self.album_url + str(track_data['album']['id']) + '/tracks'
- ).json()['data']
+ self.album_url + str(track_data["album"]["id"]) + "/tracks"
+ ).json()["data"]
medium_total = 0
for i, track_data in enumerate(album_tracks_data, start=1):
- if track_data['disk_number'] == track.medium:
+ if track_data["disk_number"] == track.medium:
medium_total += 1
- if track_data['id'] == track.track_id:
+ if track_data["id"] == track.track_id:
track.index = i
track.medium_total = medium_total
return track
@staticmethod
- def _construct_search_query(filters=None, keywords=''):
+ def _construct_search_query(filters=None, keywords=""):
"""Construct a query string with the specified filters and keywords to
be provided to the Deezer Search API
(https://developers.deezer.com/api/search).
@@ -228,14 +231,14 @@ def _construct_search_query(filters=None, keywords=''):
"""
query_components = [
keywords,
- ' '.join(f'{k}:"{v}"' for k, v in filters.items()),
+ " ".join(f'{k}:"{v}"' for k, v in filters.items()),
]
- query = ' '.join([q for q in query_components if q])
+ query = " ".join([q for q in query_components if q])
if not isinstance(query, str):
- query = query.decode('utf8')
+ query = query.decode("utf8")
return unidecode.unidecode(query)
- def _search_api(self, query_type, filters=None, keywords=''):
+ def _search_api(self, query_type, filters=None, keywords=""):
"""Query the Deezer Search API for the specified ``keywords``, applying
the provided ``filters``.
@@ -251,19 +254,15 @@ def _search_api(self, query_type, filters=None, keywords=''):
if no search results are returned.
:rtype: dict or None
"""
- query = self._construct_search_query(
- keywords=keywords, filters=filters
- )
+ query = self._construct_search_query(keywords=keywords, filters=filters)
if not query:
return None
- self._log.debug(
- f"Searching {self.data_source} for '{query}'"
- )
+ self._log.debug(f"Searching {self.data_source} for '{query}'")
response = requests.get(
- self.search_url + query_type, params={'q': query}
+ self.search_url + query_type, params={"q": query}
)
response.raise_for_status()
- response_data = response.json().get('data', [])
+ response_data = response.json().get("data", [])
self._log.debug(
"Found {} result(s) from {} for '{}'",
len(response_data),
@@ -275,20 +274,25 @@ def _search_api(self, query_type, filters=None, keywords=''):
def deezerupdate(self, items, write):
"""Obtain rank information from Deezer."""
for index, item in enumerate(items, start=1):
- self._log.info('Processing {}/{} tracks - {} ',
- index, len(items), item)
+ self._log.info(
+ "Processing {}/{} tracks - {} ", index, len(items), item
+ )
try:
deezer_track_id = item.deezer_track_id
except AttributeError:
- self._log.debug('No deezer_track_id present for: {}', item)
+ self._log.debug("No deezer_track_id present for: {}", item)
continue
try:
- rank = requests.get(
- f"{self.track_url}{deezer_track_id}").json().get('rank')
- self._log.debug('Deezer track: {} has {} rank',
- deezer_track_id, rank)
+ rank = (
+ requests.get(f"{self.track_url}{deezer_track_id}")
+ .json()
+ .get("rank")
+ )
+ self._log.debug(
+ "Deezer track: {} has {} rank", deezer_track_id, rank
+ )
except Exception as e:
- self._log.debug('Invalid Deezer track_id: {}', e)
+ self._log.debug("Invalid Deezer track_id: {}", e)
continue
item.deezer_track_rank = int(rank)
item.store()
diff --git a/beetsplug/discogs.py b/beetsplug/discogs.py
index d804a3e15a..5bdd277054 100644
--- a/beetsplug/discogs.py
+++ b/beetsplug/discogs.py
@@ -16,79 +16,87 @@
python3-discogs-client library.
"""
-import beets.ui
-from beets import config
-from beets.util.id_extractors import extract_discogs_id_regex
-from beets.autotag.hooks import AlbumInfo, TrackInfo, string_dist
-from beets.plugins import MetadataSourcePlugin, BeetsPlugin, get_distance
-import confuse
-from discogs_client import __version__ as dc_string
-from discogs_client import Release, Master, Client
-from discogs_client.exceptions import DiscogsAPIError
-from requests.exceptions import ConnectionError
import http.client
-import beets
-import re
-import time
import json
-import socket
import os
+import re
+import socket
+import time
import traceback
from string import ascii_lowercase
+import confuse
+from discogs_client import Client, Master, Release
+from discogs_client import __version__ as dc_string
+from discogs_client.exceptions import DiscogsAPIError
+from requests.exceptions import ConnectionError
-USER_AGENT = f'beets/{beets.__version__} +https://beets.io/'
-API_KEY = 'rAzVUQYRaoFjeBjyWuWZ'
-API_SECRET = 'plxtUTqoCzwxZpqdPysCwGuBSmZNdZVy'
+import beets
+import beets.ui
+from beets import config
+from beets.autotag.hooks import AlbumInfo, TrackInfo, string_dist
+from beets.plugins import BeetsPlugin, MetadataSourcePlugin, get_distance
+from beets.util.id_extractors import extract_discogs_id_regex
+
+USER_AGENT = f"beets/{beets.__version__} +https://beets.io/"
+API_KEY = "rAzVUQYRaoFjeBjyWuWZ"
+API_SECRET = "plxtUTqoCzwxZpqdPysCwGuBSmZNdZVy"
# Exceptions that discogs_client should really handle but does not.
-CONNECTION_ERRORS = (ConnectionError, socket.error, http.client.HTTPException,
- ValueError, # JSON decoding raises a ValueError.
- DiscogsAPIError)
+CONNECTION_ERRORS = (
+ ConnectionError,
+ socket.error,
+ http.client.HTTPException,
+ ValueError, # JSON decoding raises a ValueError.
+ DiscogsAPIError,
+)
class DiscogsPlugin(BeetsPlugin):
-
def __init__(self):
super().__init__()
self.check_discogs_client()
- self.config.add({
- 'apikey': API_KEY,
- 'apisecret': API_SECRET,
- 'tokenfile': 'discogs_token.json',
- 'source_weight': 0.5,
- 'user_token': '',
- 'separator': ', ',
- 'index_tracks': False,
- 'append_style_genre': False,
- })
- self.config['apikey'].redact = True
- self.config['apisecret'].redact = True
- self.config['user_token'].redact = True
+ self.config.add(
+ {
+ "apikey": API_KEY,
+ "apisecret": API_SECRET,
+ "tokenfile": "discogs_token.json",
+ "source_weight": 0.5,
+ "user_token": "",
+ "separator": ", ",
+ "index_tracks": False,
+ "append_style_genre": False,
+ }
+ )
+ self.config["apikey"].redact = True
+ self.config["apisecret"].redact = True
+ self.config["user_token"].redact = True
self.discogs_client = None
- self.register_listener('import_begin', self.setup)
+ self.register_listener("import_begin", self.setup)
def check_discogs_client(self):
- """Ensure python3-discogs-client version >= 2.3.15
- """
+ """Ensure python3-discogs-client version >= 2.3.15"""
dc_min_version = [2, 3, 15]
- dc_version = [int(elem) for elem in dc_string.split('.')]
+ dc_version = [int(elem) for elem in dc_string.split(".")]
min_len = min(len(dc_version), len(dc_min_version))
- gt_min = [(elem > elem_min) for elem, elem_min in
- zip(dc_version[:min_len],
- dc_min_version[:min_len])]
+ gt_min = [
+ (elem > elem_min)
+ for elem, elem_min in zip(
+ dc_version[:min_len], dc_min_version[:min_len]
+ )
+ ]
if True not in gt_min:
- self._log.warning(('python3-discogs-client version should be '
- '>= 2.3.15'))
+ self._log.warning(
+ ("python3-discogs-client version should be " ">= 2.3.15")
+ )
def setup(self, session=None):
- """Create the `discogs_client` field. Authenticate if necessary.
- """
- c_key = self.config['apikey'].as_str()
- c_secret = self.config['apisecret'].as_str()
+ """Create the `discogs_client` field. Authenticate if necessary."""
+ c_key = self.config["apikey"].as_str()
+ c_secret = self.config["apisecret"].as_str()
# Try using a configured user token (bypassing OAuth login).
- user_token = self.config['user_token'].as_str()
+ user_token = self.config["user_token"].as_str()
if user_token:
# The rate limit for authenticated users goes up to 60
# requests per minute.
@@ -103,22 +111,19 @@ def setup(self, session=None):
# No token yet. Generate one.
token, secret = self.authenticate(c_key, c_secret)
else:
- token = tokendata['token']
- secret = tokendata['secret']
+ token = tokendata["token"]
+ secret = tokendata["secret"]
- self.discogs_client = Client(USER_AGENT, c_key, c_secret,
- token, secret)
+ self.discogs_client = Client(USER_AGENT, c_key, c_secret, token, secret)
def reset_auth(self):
- """Delete token file & redo the auth steps.
- """
+ """Delete token file & redo the auth steps."""
os.remove(self._tokenfile())
self.setup()
def _tokenfile(self):
- """Get the path to the JSON file for storing the OAuth token.
- """
- return self.config['tokenfile'].get(confuse.Filename(in_app_dir=True))
+ """Get the path to the JSON file for storing the OAuth token."""
+ return self.config["tokenfile"].get(confuse.Filename(in_app_dir=True))
def authenticate(self, c_key, c_secret):
# Get the link for the OAuth page.
@@ -126,8 +131,8 @@ def authenticate(self, c_key, c_secret):
try:
_, _, url = auth_client.get_authorize_url()
except CONNECTION_ERRORS as e:
- self._log.debug('connection error: {0}', e)
- raise beets.ui.UserError('communication with Discogs failed')
+ self._log.debug("connection error: {0}", e)
+ raise beets.ui.UserError("communication with Discogs failed")
beets.ui.print_("To authenticate with Discogs, visit:")
beets.ui.print_(url)
@@ -137,34 +142,28 @@ def authenticate(self, c_key, c_secret):
try:
token, secret = auth_client.get_access_token(code)
except DiscogsAPIError:
- raise beets.ui.UserError('Discogs authorization failed')
+ raise beets.ui.UserError("Discogs authorization failed")
except CONNECTION_ERRORS as e:
- self._log.debug('connection error: {0}', e)
- raise beets.ui.UserError('Discogs token request failed')
+ self._log.debug("connection error: {0}", e)
+ raise beets.ui.UserError("Discogs token request failed")
# Save the token for later use.
- self._log.debug('Discogs token {0}, secret {1}', token, secret)
- with open(self._tokenfile(), 'w') as f:
- json.dump({'token': token, 'secret': secret}, f)
+ self._log.debug("Discogs token {0}, secret {1}", token, secret)
+ with open(self._tokenfile(), "w") as f:
+ json.dump({"token": token, "secret": secret}, f)
return token, secret
def album_distance(self, items, album_info, mapping):
- """Returns the album distance.
- """
+ """Returns the album distance."""
return get_distance(
- data_source='Discogs',
- info=album_info,
- config=self.config
+ data_source="Discogs", info=album_info, config=self.config
)
def track_distance(self, item, track_info):
- """Returns the track distance.
- """
+ """Returns the track distance."""
return get_distance(
- data_source='Discogs',
- info=track_info,
- config=self.config
+ data_source="Discogs", info=track_info, config=self.config
)
def candidates(self, items, artist, album, va_likely, extra_tags=None):
@@ -175,33 +174,37 @@ def candidates(self, items, artist, album, va_likely, extra_tags=None):
return
if not album and not artist:
- self._log.debug('Skipping Discogs query. Files missing album and '
- 'artist tags.')
+ self._log.debug(
+ "Skipping Discogs query. Files missing album and "
+ "artist tags."
+ )
return []
if va_likely:
query = album
else:
- query = f'{artist} {album}'
+ query = f"{artist} {album}"
try:
return self.get_albums(query)
except DiscogsAPIError as e:
- self._log.debug('API Error: {0} (query: {1})', e, query)
+ self._log.debug("API Error: {0} (query: {1})", e, query)
if e.status_code == 401:
self.reset_auth()
return self.candidates(items, artist, album, va_likely)
else:
return []
except CONNECTION_ERRORS:
- self._log.debug('Connection error in album search', exc_info=True)
+ self._log.debug("Connection error in album search", exc_info=True)
return []
- def get_track_from_album_by_title(self, album_info, title,
- dist_threshold=0.3):
+ def get_track_from_album_by_title(
+ self, album_info, title, dist_threshold=0.3
+ ):
def compare_func(track_info):
track_title = getattr(track_info, "title", None)
dist = string_dist(track_title, title)
- return (track_title and dist < dist_threshold)
+ return track_title and dist < dist_threshold
+
return self.get_track_from_album(album_info, compare_func)
def get_track_from_album(self, album_info, compare_func):
@@ -220,11 +223,11 @@ def get_track_from_album(self, album_info, compare_func):
continue
# attach artist info if not provided
- if not track_info['artist']:
- track_info['artist'] = album_info.artist
- track_info['artist_id'] = album_info.artist_id
+ if not track_info["artist"]:
+ track_info["artist"] = album_info.artist
+ track_info["artist_id"] = album_info.artist_id
# attach album info
- track_info['album'] = album_info.album
+ track_info["album"] = album_info.album
return track_info
@@ -246,27 +249,28 @@ def item_candidates(self, item, artist, title):
return
if not artist and not title:
- self._log.debug('Skipping Discogs query. File missing artist and '
- 'title tags.')
+ self._log.debug(
+ "Skipping Discogs query. File missing artist and " "title tags."
+ )
return
- query = f'{artist} {title}'
+ query = f"{artist} {title}"
try:
albums = self.get_albums(query)
except DiscogsAPIError as e:
- self._log.debug('API Error: {0} (query: {1})', e, query)
+ self._log.debug("API Error: {0} (query: {1})", e, query)
if e.status_code == 401:
self.reset_auth()
return self.item_candidates(item, artist, title)
else:
return []
except CONNECTION_ERRORS:
- self._log.debug('Connection error in track search', exc_info=True)
+ self._log.debug("Connection error in track search", exc_info=True)
candidates = []
for album_cur in albums:
- self._log.debug(u'searching within album {0}', album_cur.album)
+ self._log.debug("searching within album {0}", album_cur.album)
track_result = self.get_track_from_album_by_title(
- album_cur, item['title']
+ album_cur, item["title"]
)
if track_result:
candidates.append(track_result)
@@ -280,83 +284,90 @@ def album_for_id(self, album_id):
if not self.discogs_client:
return
- self._log.debug('Searching for release {0}', album_id)
+ self._log.debug("Searching for release {0}", album_id)
discogs_id = extract_discogs_id_regex(album_id)
if not discogs_id:
return None
- result = Release(self.discogs_client, {'id': discogs_id})
+ result = Release(self.discogs_client, {"id": discogs_id})
# Try to obtain title to verify that we indeed have a valid Release
try:
- getattr(result, 'title')
+ getattr(result, "title")
except DiscogsAPIError as e:
if e.status_code != 404:
- self._log.debug('API Error: {0} (query: {1})', e,
- result.data['resource_url'])
+ self._log.debug(
+ "API Error: {0} (query: {1})",
+ e,
+ result.data["resource_url"],
+ )
if e.status_code == 401:
self.reset_auth()
return self.album_for_id(album_id)
return None
except CONNECTION_ERRORS:
- self._log.debug('Connection error in album lookup',
- exc_info=True)
+ self._log.debug("Connection error in album lookup", exc_info=True)
return None
return self.get_album_info(result)
def get_albums(self, query):
- """Returns a list of AlbumInfo objects for a discogs search query.
- """
+ """Returns a list of AlbumInfo objects for a discogs search query."""
# Strip non-word characters from query. Things like "!" and "-" can
# cause a query to return no results, even if they match the artist or
# album title. Use `re.UNICODE` flag to avoid stripping non-english
# word characters.
- query = re.sub(r'(?u)\W+', ' ', query)
+ query = re.sub(r"(?u)\W+", " ", query)
# Strip medium information from query, Things like "CD1" and "disk 1"
# can also negate an otherwise positive result.
- query = re.sub(r'(?i)\b(CD|disc|vinyl)\s*\d+', '', query)
+ query = re.sub(r"(?i)\b(CD|disc|vinyl)\s*\d+", "", query)
try:
- releases = self.discogs_client.search(query,
- type='release').page(1)
+ releases = self.discogs_client.search(query, type="release").page(1)
except CONNECTION_ERRORS:
- self._log.debug("Communication error while searching for {0!r}",
- query, exc_info=True)
+ self._log.debug(
+ "Communication error while searching for {0!r}",
+ query,
+ exc_info=True,
+ )
return []
- return [album for album in map(self.get_album_info, releases[:5])
- if album]
+ return [
+ album for album in map(self.get_album_info, releases[:5]) if album
+ ]
def get_master_year(self, master_id):
"""Fetches a master release given its Discogs ID and returns its year
or None if the master release is not found.
"""
- self._log.debug('Searching for master release {0}', master_id)
- result = Master(self.discogs_client, {'id': master_id})
+ self._log.debug("Searching for master release {0}", master_id)
+ result = Master(self.discogs_client, {"id": master_id})
try:
- year = result.fetch('year')
+ year = result.fetch("year")
return year
except DiscogsAPIError as e:
if e.status_code != 404:
- self._log.debug('API Error: {0} (query: {1})', e,
- result.data['resource_url'])
+ self._log.debug(
+ "API Error: {0} (query: {1})",
+ e,
+ result.data["resource_url"],
+ )
if e.status_code == 401:
self.reset_auth()
return self.get_master_year(master_id)
return None
except CONNECTION_ERRORS:
- self._log.debug('Connection error in master release lookup',
- exc_info=True)
+ self._log.debug(
+ "Connection error in master release lookup", exc_info=True
+ )
return None
def get_album_info(self, result):
- """Returns an AlbumInfo object for a discogs Release object.
- """
+ """Returns an AlbumInfo object for a discogs Release object."""
# Explicitly reload the `Release` fields, as they might not be yet
# present if the result is from a `discogs_client.search()`.
- if not result.data.get('artists'):
+ if not result.data.get("artists"):
result.refresh()
# Sanity check for required fields. The list of required fields is
@@ -364,55 +375,60 @@ def get_album_info(self, result):
# lacking some of these fields. This function expects at least:
# `artists` (>0), `title`, `id`, `tracklist` (>0)
# https://www.discogs.com/help/doc/submission-guidelines-general-rules
- if not all([result.data.get(k) for k in ['artists', 'title', 'id',
- 'tracklist']]):
+ if not all(
+ [
+ result.data.get(k)
+ for k in ["artists", "title", "id", "tracklist"]
+ ]
+ ):
self._log.warning("Release does not contain the required fields")
return None
artist, artist_id = MetadataSourcePlugin.get_artist(
- [a.data for a in result.artists],
- join_key='join'
+ [a.data for a in result.artists], join_key="join"
)
- album = re.sub(r' +', ' ', result.title)
- album_id = result.data['id']
+ album = re.sub(r" +", " ", result.title)
+ album_id = result.data["id"]
# Use `.data` to access the tracklist directly instead of the
# convenient `.tracklist` property, which will strip out useful artist
# information and leave us with skeleton `Artist` objects that will
# each make an API call just to get the same data back.
- tracks = self.get_tracks(result.data['tracklist'])
+ tracks = self.get_tracks(result.data["tracklist"])
# Extract information for the optional AlbumInfo fields, if possible.
- va = result.data['artists'][0].get('name', '').lower() == 'various'
- year = result.data.get('year')
+ va = result.data["artists"][0].get("name", "").lower() == "various"
+ year = result.data.get("year")
mediums = [t.medium for t in tracks]
- country = result.data.get('country')
- data_url = result.data.get('uri')
- style = self.format(result.data.get('styles'))
- base_genre = self.format(result.data.get('genres'))
+ country = result.data.get("country")
+ data_url = result.data.get("uri")
+ style = self.format(result.data.get("styles"))
+ base_genre = self.format(result.data.get("genres"))
- if self.config['append_style_genre'] and style:
- genre = self.config['separator'].as_str().join([base_genre, style])
+ if self.config["append_style_genre"] and style:
+ genre = self.config["separator"].as_str().join([base_genre, style])
else:
genre = base_genre
- discogs_albumid = extract_discogs_id_regex(result.data.get('uri'))
+ discogs_albumid = extract_discogs_id_regex(result.data.get("uri"))
# Extract information for the optional AlbumInfo fields that are
# contained on nested discogs fields.
albumtype = media = label = catalogno = labelid = None
- if result.data.get('formats'):
- albumtype = ', '.join(
- result.data['formats'][0].get('descriptions', [])) or None
- media = result.data['formats'][0]['name']
- if result.data.get('labels'):
- label = result.data['labels'][0].get('name')
- catalogno = result.data['labels'][0].get('catno')
- labelid = result.data['labels'][0].get('id')
+ if result.data.get("formats"):
+ albumtype = (
+ ", ".join(result.data["formats"][0].get("descriptions", []))
+ or None
+ )
+ media = result.data["formats"][0]["name"]
+ if result.data.get("labels"):
+ label = result.data["labels"][0].get("name")
+ catalogno = result.data["labels"][0].get("catno")
+ labelid = result.data["labels"][0].get("id")
# Additional cleanups (various artists name, catalog number, media).
if va:
- artist = config['va_name'].as_str()
- if catalogno == 'none':
+ artist = config["va_name"].as_str()
+ if catalogno == "none":
catalogno = None
# Explicitly set the `media` for the tracks, since it is expected by
# `autotag.apply_metadata`, and set `medium_total`.
@@ -427,43 +443,57 @@ def get_album_info(self, result):
# in #2336.
track.track_id = str(album_id) + "-" + track.track_alt
track.data_url = data_url
- track.data_source = 'Discogs'
+ track.data_source = "Discogs"
# Retrieve master release id (returns None if there isn't one).
- master_id = result.data.get('master_id')
+ master_id = result.data.get("master_id")
# Assume `original_year` is equal to `year` for releases without
# a master release, otherwise fetch the master release.
original_year = self.get_master_year(master_id) if master_id else year
- return AlbumInfo(album=album, album_id=album_id, artist=artist,
- artist_id=artist_id, tracks=tracks,
- albumtype=albumtype, va=va, year=year,
- label=label, mediums=len(set(mediums)),
- releasegroup_id=master_id, catalognum=catalogno,
- country=country, style=style, genre=genre,
- media=media, original_year=original_year,
- data_source='Discogs', data_url=data_url,
- discogs_albumid=discogs_albumid,
- discogs_labelid=labelid, discogs_artistid=artist_id)
+ return AlbumInfo(
+ album=album,
+ album_id=album_id,
+ artist=artist,
+ artist_id=artist_id,
+ tracks=tracks,
+ albumtype=albumtype,
+ va=va,
+ year=year,
+ label=label,
+ mediums=len(set(mediums)),
+ releasegroup_id=master_id,
+ catalognum=catalogno,
+ country=country,
+ style=style,
+ genre=genre,
+ media=media,
+ original_year=original_year,
+ data_source="Discogs",
+ data_url=data_url,
+ discogs_albumid=discogs_albumid,
+ discogs_labelid=labelid,
+ discogs_artistid=artist_id,
+ )
def format(self, classification):
if classification:
- return self.config['separator'].as_str() \
- .join(sorted(classification))
+ return (
+ self.config["separator"].as_str().join(sorted(classification))
+ )
else:
return None
def get_tracks(self, tracklist):
- """Returns a list of TrackInfo objects for a discogs tracklist.
- """
+ """Returns a list of TrackInfo objects for a discogs tracklist."""
try:
clean_tracklist = self.coalesce_tracks(tracklist)
except Exception as exc:
# FIXME: this is an extra precaution for making sure there are no
# side effects after #2222. It should be removed after further
# testing.
- self._log.debug('{}', traceback.format_exc())
- self._log.error('uncaught exception in coalesce_tracks: {}', exc)
+ self._log.debug("{}", traceback.format_exc())
+ self._log.error("uncaught exception in coalesce_tracks: {}", exc)
clean_tracklist = tracklist
tracks = []
index_tracks = {}
@@ -472,7 +502,7 @@ def get_tracks(self, tracklist):
divisions, next_divisions = [], []
for track in clean_tracklist:
# Only real tracks have `position`. Otherwise, it's an index track.
- if track['position']:
+ if track["position"]:
index += 1
if next_divisions:
# End of a block of index tracks: update the current
@@ -480,17 +510,17 @@ def get_tracks(self, tracklist):
divisions += next_divisions
del next_divisions[:]
track_info = self.get_track_info(track, index, divisions)
- track_info.track_alt = track['position']
+ track_info.track_alt = track["position"]
tracks.append(track_info)
else:
- next_divisions.append(track['title'])
+ next_divisions.append(track["title"])
# We expect new levels of division at the beginning of the
# tracklist (and possibly elsewhere).
try:
divisions.pop()
except IndexError:
pass
- index_tracks[index + 1] = track['title']
+ index_tracks[index + 1] = track["title"]
# Fix up medium and medium_index for each track. Discogs position is
# unreliable, but tracks are in order.
@@ -504,7 +534,7 @@ def get_tracks(self, tracklist):
m = sorted({track.medium.lower() for track in tracks})
# If all track.medium are single consecutive letters, assume it is
# a 2-sided medium.
- if ''.join(m) in ascii_lowercase:
+ if "".join(m) in ascii_lowercase:
sides_per_medium = 2
for track in tracks:
@@ -514,10 +544,15 @@ def get_tracks(self, tracklist):
# are the track index, not the medium.
# side_count is the number of mediums or medium sides (in the case
# of two-sided mediums) that were seen before.
- medium_is_index = track.medium and not track.medium_index and (
- len(track.medium) != 1 or
- # Not within standard incremental medium values (A, B, C, ...).
- ord(track.medium) - 64 != side_count + 1
+ medium_is_index = (
+ track.medium
+ and not track.medium_index
+ and (
+ len(track.medium) != 1
+ or
+ # Not within standard incremental medium values (A, B, C, ...).
+ ord(track.medium) - 64 != side_count + 1
+ )
)
if not medium_is_index and medium != track.medium:
@@ -554,51 +589,54 @@ def coalesce_tracks(self, raw_tracklist):
title for the merged track is the one from the previous index track,
if present; otherwise it is a combination of the subtracks titles.
"""
+
def add_merged_subtracks(tracklist, subtracks):
"""Modify `tracklist` in place, merging a list of `subtracks` into
a single track into `tracklist`."""
# Calculate position based on first subtrack, without subindex.
- idx, medium_idx, sub_idx = \
- self.get_track_index(subtracks[0]['position'])
- position = '{}{}'.format(idx or '', medium_idx or '')
+ idx, medium_idx, sub_idx = self.get_track_index(
+ subtracks[0]["position"]
+ )
+ position = "{}{}".format(idx or "", medium_idx or "")
- if tracklist and not tracklist[-1]['position']:
+ if tracklist and not tracklist[-1]["position"]:
# Assume the previous index track contains the track title.
if sub_idx:
# "Convert" the track title to a real track, discarding the
# subtracks assuming they are logical divisions of a
# physical track (12.2.9 Subtracks).
- tracklist[-1]['position'] = position
+ tracklist[-1]["position"] = position
else:
# Promote the subtracks to real tracks, discarding the
# index track, assuming the subtracks are physical tracks.
index_track = tracklist.pop()
# Fix artists when they are specified on the index track.
- if index_track.get('artists'):
+ if index_track.get("artists"):
for subtrack in subtracks:
- if not subtrack.get('artists'):
- subtrack['artists'] = index_track['artists']
+ if not subtrack.get("artists"):
+ subtrack["artists"] = index_track["artists"]
# Concatenate index with track title when index_tracks
# option is set
- if self.config['index_tracks']:
+ if self.config["index_tracks"]:
for subtrack in subtracks:
- subtrack['title'] = '{}: {}'.format(
- index_track['title'], subtrack['title'])
+ subtrack["title"] = "{}: {}".format(
+ index_track["title"], subtrack["title"]
+ )
tracklist.extend(subtracks)
else:
# Merge the subtracks, pick a title, and append the new track.
track = subtracks[0].copy()
- track['title'] = ' / '.join([t['title'] for t in subtracks])
+ track["title"] = " / ".join([t["title"] for t in subtracks])
tracklist.append(track)
# Pre-process the tracklist, trying to identify subtracks.
subtracks = []
tracklist = []
- prev_subindex = ''
+ prev_subindex = ""
for track in raw_tracklist:
# Regular subtrack (track with subindex).
- if track['position']:
- _, _, subindex = self.get_track_index(track['position'])
+ if track["position"]:
+ _, _, subindex = self.get_track_index(track["position"])
if subindex:
if subindex.rjust(len(raw_tracklist)) > prev_subindex:
# Subtrack still part of the current main track.
@@ -611,17 +649,17 @@ def add_merged_subtracks(tracklist, subtracks):
continue
# Index track with nested sub_tracks.
- if not track['position'] and 'sub_tracks' in track:
+ if not track["position"] and "sub_tracks" in track:
# Append the index track, assuming it contains the track title.
tracklist.append(track)
- add_merged_subtracks(tracklist, track['sub_tracks'])
+ add_merged_subtracks(tracklist, track["sub_tracks"])
continue
# Regular track or index track without nested sub_tracks.
if subtracks:
add_merged_subtracks(tracklist, subtracks)
subtracks = []
- prev_subindex = ''
+ prev_subindex = ""
tracklist.append(track)
# Merge and add the remaining subtracks, if any.
@@ -631,23 +669,28 @@ def add_merged_subtracks(tracklist, subtracks):
return tracklist
def get_track_info(self, track, index, divisions):
- """Returns a TrackInfo object for a discogs track.
- """
- title = track['title']
- if self.config['index_tracks']:
- prefix = ', '.join(divisions)
+ """Returns a TrackInfo object for a discogs track."""
+ title = track["title"]
+ if self.config["index_tracks"]:
+ prefix = ", ".join(divisions)
if prefix:
- title = f'{prefix}: {title}'
+ title = f"{prefix}: {title}"
track_id = None
- medium, medium_index, _ = self.get_track_index(track['position'])
+ medium, medium_index, _ = self.get_track_index(track["position"])
artist, artist_id = MetadataSourcePlugin.get_artist(
- track.get('artists', []),
- join_key='join'
+ track.get("artists", []), join_key="join"
+ )
+ length = self.get_track_length(track["duration"])
+ return TrackInfo(
+ title=title,
+ track_id=track_id,
+ artist=artist,
+ artist_id=artist_id,
+ length=length,
+ index=index,
+ medium=medium,
+ medium_index=medium_index,
)
- length = self.get_track_length(track['duration'])
- return TrackInfo(title=title, track_id=track_id, artist=artist,
- artist_id=artist_id, length=length, index=index,
- medium=medium, medium_index=medium_index)
def get_track_index(self, position):
"""Returns the medium, medium index and subtrack index for a discogs
@@ -655,34 +698,33 @@ def get_track_index(self, position):
# Match the standard Discogs positions (12.2.9), which can have several
# forms (1, 1-1, A1, A1.1, A1a, ...).
match = re.match(
- r'^(.*?)' # medium: everything before medium_index.
- r'(\d*?)' # medium_index: a number at the end of
- # `position`, except if followed by a subtrack
- # index.
- # subtrack_index: can only be matched if medium
- # or medium_index have been matched, and can be
- r'((?<=\w)\.[\w]+' # - a dot followed by a string (A.1, 2.A)
- r'|(?<=\d)[A-Z]+' # - a string that follows a number (1A, B2a)
- r')?'
- r'$',
- position.upper()
+ r"^(.*?)" # medium: everything before medium_index.
+ r"(\d*?)" # medium_index: a number at the end of
+ # `position`, except if followed by a subtrack
+ # index.
+ # subtrack_index: can only be matched if medium
+ # or medium_index have been matched, and can be
+ r"((?<=\w)\.[\w]+" # - a dot followed by a string (A.1, 2.A)
+ r"|(?<=\d)[A-Z]+" # - a string that follows a number (1A, B2a)
+ r")?"
+ r"$",
+ position.upper(),
)
if match:
medium, index, subindex = match.groups()
- if subindex and subindex.startswith('.'):
+ if subindex and subindex.startswith("."):
subindex = subindex[1:]
else:
- self._log.debug('Invalid position: {0}', position)
+ self._log.debug("Invalid position: {0}", position)
medium = index = subindex = None
return medium or None, index or None, subindex or None
def get_track_length(self, duration):
- """Returns the track length in seconds for a discogs duration.
- """
+ """Returns the track length in seconds for a discogs duration."""
try:
- length = time.strptime(duration, '%M:%S')
+ length = time.strptime(duration, "%M:%S")
except ValueError:
return None
return length.tm_min * 60 + length.tm_sec
diff --git a/beetsplug/duplicates.py b/beetsplug/duplicates.py
index f655c61f26..ced96e4033 100644
--- a/beetsplug/duplicates.py
+++ b/beetsplug/duplicates.py
@@ -15,123 +15,150 @@
"""List duplicate tracks or albums.
"""
-import shlex
import os
+import shlex
+from beets.library import Album, Item
from beets.plugins import BeetsPlugin
-from beets.ui import decargs, print_, Subcommand, UserError
-from beets.util import command_output, displayable_path, subprocess, \
- bytestring_path, MoveOperation
-from beets.library import Item, Album
-
+from beets.ui import Subcommand, UserError, decargs, print_
+from beets.util import (
+ MoveOperation,
+ bytestring_path,
+ command_output,
+ displayable_path,
+ subprocess,
+)
-PLUGIN = 'duplicates'
+PLUGIN = "duplicates"
class DuplicatesPlugin(BeetsPlugin):
- """List duplicate tracks or albums
- """
+ """List duplicate tracks or albums"""
+
def __init__(self):
super().__init__()
- self.config.add({
- 'album': False,
- 'checksum': '',
- 'copy': '',
- 'count': False,
- 'delete': False,
- 'format': '',
- 'full': False,
- 'keys': [],
- 'merge': False,
- 'move': '',
- 'path': False,
- 'tiebreak': {},
- 'strict': False,
- 'tag': '',
- })
-
- self._command = Subcommand('duplicates',
- help=__doc__,
- aliases=['dup'])
+ self.config.add(
+ {
+ "album": False,
+ "checksum": "",
+ "copy": "",
+ "count": False,
+ "delete": False,
+ "format": "",
+ "full": False,
+ "keys": [],
+ "merge": False,
+ "move": "",
+ "path": False,
+ "tiebreak": {},
+ "strict": False,
+ "tag": "",
+ }
+ )
+
+ self._command = Subcommand("duplicates", help=__doc__, aliases=["dup"])
self._command.parser.add_option(
- '-c', '--count', dest='count',
- action='store_true',
- help='show duplicate counts',
+ "-c",
+ "--count",
+ dest="count",
+ action="store_true",
+ help="show duplicate counts",
)
self._command.parser.add_option(
- '-C', '--checksum', dest='checksum',
- action='store', metavar='PROG',
- help='report duplicates based on arbitrary command',
+ "-C",
+ "--checksum",
+ dest="checksum",
+ action="store",
+ metavar="PROG",
+ help="report duplicates based on arbitrary command",
)
self._command.parser.add_option(
- '-d', '--delete', dest='delete',
- action='store_true',
- help='delete items from library and disk',
+ "-d",
+ "--delete",
+ dest="delete",
+ action="store_true",
+ help="delete items from library and disk",
)
self._command.parser.add_option(
- '-F', '--full', dest='full',
- action='store_true',
- help='show all versions of duplicate tracks or albums',
+ "-F",
+ "--full",
+ dest="full",
+ action="store_true",
+ help="show all versions of duplicate tracks or albums",
)
self._command.parser.add_option(
- '-s', '--strict', dest='strict',
- action='store_true',
- help='report duplicates only if all attributes are set',
+ "-s",
+ "--strict",
+ dest="strict",
+ action="store_true",
+ help="report duplicates only if all attributes are set",
)
self._command.parser.add_option(
- '-k', '--key', dest='keys',
- action='append', metavar='KEY',
- help='report duplicates based on keys (use multiple times)',
+ "-k",
+ "--key",
+ dest="keys",
+ action="append",
+ metavar="KEY",
+ help="report duplicates based on keys (use multiple times)",
)
self._command.parser.add_option(
- '-M', '--merge', dest='merge',
- action='store_true',
- help='merge duplicate items',
+ "-M",
+ "--merge",
+ dest="merge",
+ action="store_true",
+ help="merge duplicate items",
)
self._command.parser.add_option(
- '-m', '--move', dest='move',
- action='store', metavar='DEST',
- help='move items to dest',
+ "-m",
+ "--move",
+ dest="move",
+ action="store",
+ metavar="DEST",
+ help="move items to dest",
)
self._command.parser.add_option(
- '-o', '--copy', dest='copy',
- action='store', metavar='DEST',
- help='copy items to dest',
+ "-o",
+ "--copy",
+ dest="copy",
+ action="store",
+ metavar="DEST",
+ help="copy items to dest",
)
self._command.parser.add_option(
- '-t', '--tag', dest='tag',
- action='store',
- help='tag matched items with \'k=v\' attribute',
+ "-t",
+ "--tag",
+ dest="tag",
+ action="store",
+ help="tag matched items with 'k=v' attribute",
)
self._command.parser.add_all_common_options()
def commands(self):
-
def _dup(lib, opts, args):
self.config.set_args(opts)
- album = self.config['album'].get(bool)
- checksum = self.config['checksum'].get(str)
- copy = bytestring_path(self.config['copy'].as_str())
- count = self.config['count'].get(bool)
- delete = self.config['delete'].get(bool)
- fmt = self.config['format'].get(str)
- full = self.config['full'].get(bool)
- keys = self.config['keys'].as_str_seq()
- merge = self.config['merge'].get(bool)
- move = bytestring_path(self.config['move'].as_str())
- path = self.config['path'].get(bool)
- tiebreak = self.config['tiebreak'].get(dict)
- strict = self.config['strict'].get(bool)
- tag = self.config['tag'].get(str)
+ album = self.config["album"].get(bool)
+ checksum = self.config["checksum"].get(str)
+ copy = bytestring_path(self.config["copy"].as_str())
+ count = self.config["count"].get(bool)
+ delete = self.config["delete"].get(bool)
+ fmt = self.config["format"].get(str)
+ full = self.config["full"].get(bool)
+ keys = self.config["keys"].as_str_seq()
+ merge = self.config["merge"].get(bool)
+ move = bytestring_path(self.config["move"].as_str())
+ path = self.config["path"].get(bool)
+ tiebreak = self.config["tiebreak"].get(dict)
+ strict = self.config["strict"].get(bool)
+ tag = self.config["tag"].get(str)
if album:
if not keys:
- keys = ['mb_albumid']
+ keys = ["mb_albumid"]
items = lib.albums(decargs(args))
else:
if not keys:
- keys = ['mb_trackid', 'mb_albumid']
+ keys = ["mb_trackid", "mb_albumid"]
items = lib.items(decargs(args))
# If there's nothing to do, return early. The code below assumes
@@ -140,43 +167,47 @@ def _dup(lib, opts, args):
return
if path:
- fmt = '$path'
+ fmt = "$path"
# Default format string for count mode.
if count and not fmt:
if album:
- fmt = '$albumartist - $album'
+ fmt = "$albumartist - $album"
else:
- fmt = '$albumartist - $album - $title'
- fmt += ': {0}'
+ fmt = "$albumartist - $album - $title"
+ fmt += ": {0}"
if checksum:
for i in items:
k, _ = self._checksum(i, checksum)
keys = [k]
- for obj_id, obj_count, objs in self._duplicates(items,
- keys=keys,
- full=full,
- strict=strict,
- tiebreak=tiebreak,
- merge=merge):
+ for obj_id, obj_count, objs in self._duplicates(
+ items,
+ keys=keys,
+ full=full,
+ strict=strict,
+ tiebreak=tiebreak,
+ merge=merge,
+ ):
if obj_id: # Skip empty IDs.
for o in objs:
- self._process_item(o,
- copy=copy,
- move=move,
- delete=delete,
- tag=tag,
- fmt=fmt.format(obj_count))
+ self._process_item(
+ o,
+ copy=copy,
+ move=move,
+ delete=delete,
+ tag=tag,
+ fmt=fmt.format(obj_count),
+ )
self._command.func = _dup
return [self._command]
- def _process_item(self, item, copy=False, move=False, delete=False,
- tag=False, fmt=''):
- """Process Item `item`.
- """
+ def _process_item(
+ self, item, copy=False, move=False, delete=False, tag=False, fmt=""
+ ):
+ """Process Item `item`."""
print_(format(item, fmt))
if copy:
item.move(basedir=copy, operation=MoveOperation.COPY)
@@ -188,11 +219,9 @@ def _process_item(self, item, copy=False, move=False, delete=False,
item.remove(delete=True)
if tag:
try:
- k, v = tag.split('=')
+ k, v = tag.split("=")
except Exception:
- raise UserError(
- f"{PLUGIN}: can't parse k=v tag: {tag}"
- )
+ raise UserError(f"{PLUGIN}: can't parse k=v tag: {tag}")
setattr(item, k, v)
item.store()
@@ -201,27 +230,36 @@ def _checksum(self, item, prog):
output as flexattr on a key that is the name of the program, and
return the key, checksum tuple.
"""
- args = [p.format(file=os.fsdecode(item.path))
- for p in shlex.split(prog)]
+ args = [
+ p.format(file=os.fsdecode(item.path)) for p in shlex.split(prog)
+ ]
key = args[0]
checksum = getattr(item, key, False)
if not checksum:
- self._log.debug('key {0} on item {1} not cached:'
- 'computing checksum',
- key, displayable_path(item.path))
+ self._log.debug(
+ "key {0} on item {1} not cached:" "computing checksum",
+ key,
+ displayable_path(item.path),
+ )
try:
checksum = command_output(args).stdout
setattr(item, key, checksum)
item.store()
- self._log.debug('computed checksum for {0} using {1}',
- item.title, key)
+ self._log.debug(
+ "computed checksum for {0} using {1}", item.title, key
+ )
except subprocess.CalledProcessError as e:
- self._log.debug('failed to checksum {0}: {1}',
- displayable_path(item.path), e)
+ self._log.debug(
+ "failed to checksum {0}: {1}",
+ displayable_path(item.path),
+ e,
+ )
else:
- self._log.debug('key {0} on item {1} cached:'
- 'not computing checksum',
- key, displayable_path(item.path))
+ self._log.debug(
+ "key {0} on item {1} cached:" "not computing checksum",
+ key,
+ displayable_path(item.path),
+ )
return key, checksum
def _group_by(self, objs, keys, strict):
@@ -231,18 +269,23 @@ def _group_by(self, objs, keys, strict):
If strict, all attributes must be defined for a duplicate match.
"""
import collections
+
counts = collections.defaultdict(list)
for obj in objs:
values = [getattr(obj, k, None) for k in keys]
- values = [v for v in values if v not in (None, '')]
+ values = [v for v in values if v not in (None, "")]
if strict and len(values) < len(keys):
- self._log.debug('some keys {0} on item {1} are null or empty:'
- ' skipping',
- keys, displayable_path(obj.path))
- elif (not strict and not len(values)):
- self._log.debug('all keys {0} on item {1} are null or empty:'
- ' skipping',
- keys, displayable_path(obj.path))
+ self._log.debug(
+ "some keys {0} on item {1} are null or empty:" " skipping",
+ keys,
+ displayable_path(obj.path),
+ )
+ elif not strict and not len(values):
+ self._log.debug(
+ "all keys {0} on item {1} are null or empty:" " skipping",
+ keys,
+ displayable_path(obj.path),
+ )
else:
key = tuple(values)
counts[key].append(obj)
@@ -258,18 +301,21 @@ def _order(self, objs, tiebreak=None):
"completeness" (objects with more non-null fields come first)
and Albums are ordered by their track count.
"""
- kind = 'items' if all(isinstance(o, Item) for o in objs) else 'albums'
+ kind = "items" if all(isinstance(o, Item) for o in objs) else "albums"
if tiebreak and kind in tiebreak.keys():
key = lambda x: tuple(getattr(x, k) for k in tiebreak[kind])
else:
- if kind == 'items':
+ if kind == "items":
+
def truthy(v):
# Avoid a Unicode warning by avoiding comparison
# between a bytes object and the empty Unicode
# string ''.
- return v is not None and \
- (v != '' if isinstance(v, str) else True)
+ return v is not None and (
+ v != "" if isinstance(v, str) else True
+ )
+
fields = Item.all_keys()
key = lambda x: sum(1 for f in fields if truthy(getattr(x, f)))
else:
@@ -286,13 +332,16 @@ def _merge_items(self, objs):
fields = Item.all_keys()
for f in fields:
for o in objs[1:]:
- if getattr(objs[0], f, None) in (None, ''):
+ if getattr(objs[0], f, None) in (None, ""):
value = getattr(o, f, None)
if value:
- self._log.debug('key {0} on item {1} is null '
- 'or empty: setting from item {2}',
- f, displayable_path(objs[0].path),
- displayable_path(o.path))
+ self._log.debug(
+ "key {0} on item {1} is null "
+ "or empty: setting from item {2}",
+ f,
+ displayable_path(objs[0].path),
+ displayable_path(o.path),
+ )
setattr(objs[0], f, value)
objs[0].store()
break
@@ -310,12 +359,14 @@ def _merge_albums(self, objs):
missing = Item.from_path(i.path)
missing.album_id = objs[0].id
missing.add(i._db)
- self._log.debug('item {0} missing from album {1}:'
- ' merging from {2} into {3}',
- missing,
- objs[0],
- displayable_path(o.path),
- displayable_path(missing.destination()))
+ self._log.debug(
+ "item {0} missing from album {1}:"
+ " merging from {2} into {3}",
+ missing,
+ objs[0],
+ displayable_path(o.path),
+ displayable_path(missing.destination()),
+ )
missing.move(operation=MoveOperation.COPY)
return objs
@@ -331,8 +382,7 @@ def _merge(self, objs):
return objs
def _duplicates(self, objs, keys, full, strict, tiebreak, merge):
- """Generate triples of keys, duplicate counts, and constituent objects.
- """
+ """Generate triples of keys, duplicate counts, and constituent objects."""
offset = 0 if full else 1
for k, objs in self._group_by(objs, keys, strict).items():
if len(objs) > 1:
diff --git a/beetsplug/edit.py b/beetsplug/edit.py
index 6cd0c0df5e..1e934317d9 100644
--- a/beetsplug/edit.py
+++ b/beetsplug/edit.py
@@ -15,19 +15,18 @@
"""Open metadata information in a text editor to let the user edit it.
"""
-from beets import plugins
-from beets import util
-from beets import ui
-from beets.dbcore import types
-from beets.importer import action
-from beets.ui.commands import _do_query, PromptChoice
import codecs
-import subprocess
-import yaml
-from tempfile import NamedTemporaryFile
import os
import shlex
+import subprocess
+from tempfile import NamedTemporaryFile
+
+import yaml
+from beets import plugins, ui, util
+from beets.dbcore import types
+from beets.importer import action
+from beets.ui.commands import PromptChoice, _do_query
# These "safe" types can avoid the format/parse cycle that most fields go
# through: they are safe to edit with native YAML types.
@@ -41,22 +40,20 @@ class ParseError(Exception):
def edit(filename, log):
- """Open `filename` in a text editor.
- """
+ """Open `filename` in a text editor."""
cmd = shlex.split(util.editor_command())
cmd.append(filename)
- log.debug('invoking editor command: {!r}', cmd)
+ log.debug("invoking editor command: {!r}", cmd)
try:
subprocess.call(cmd)
except OSError as exc:
- raise ui.UserError('could not run editor command {!r}: {}'.format(
- cmd[0], exc
- ))
+ raise ui.UserError(
+ "could not run editor command {!r}: {}".format(cmd[0], exc)
+ )
def dump(arg):
- """Dump a sequence of dictionaries as YAML for editing.
- """
+ """Dump a sequence of dictionaries as YAML for editing."""
return yaml.safe_dump_all(
arg,
allow_unicode=True,
@@ -75,7 +72,7 @@ def load(s):
for d in yaml.safe_load_all(s):
if not isinstance(d, dict):
raise ParseError(
- 'each entry must be a dictionary; found {}'.format(
+ "each entry must be a dictionary; found {}".format(
type(d).__name__
)
)
@@ -85,7 +82,7 @@ def load(s):
out.append({str(k): v for k, v in d.items()})
except yaml.YAMLError as e:
- raise ParseError(f'invalid YAML: {e}')
+ raise ParseError(f"invalid YAML: {e}")
return out
@@ -145,51 +142,50 @@ def apply_(obj, data):
class EditPlugin(plugins.BeetsPlugin):
-
def __init__(self):
super().__init__()
- self.config.add({
- # The default fields to edit.
- 'albumfields': 'album albumartist',
- 'itemfields': 'track title artist album',
-
- # Silently ignore any changes to these fields.
- 'ignore_fields': 'id path',
- })
+ self.config.add(
+ {
+ # The default fields to edit.
+ "albumfields": "album albumartist",
+ "itemfields": "track title artist album",
+ # Silently ignore any changes to these fields.
+ "ignore_fields": "id path",
+ }
+ )
- self.register_listener('before_choose_candidate',
- self.before_choose_candidate_listener)
+ self.register_listener(
+ "before_choose_candidate", self.before_choose_candidate_listener
+ )
def commands(self):
- edit_command = ui.Subcommand(
- 'edit',
- help='interactively edit metadata'
- )
+ edit_command = ui.Subcommand("edit", help="interactively edit metadata")
edit_command.parser.add_option(
- '-f', '--field',
- metavar='FIELD',
- action='append',
- help='edit this field also',
+ "-f",
+ "--field",
+ metavar="FIELD",
+ action="append",
+ help="edit this field also",
)
edit_command.parser.add_option(
- '--all',
- action='store_true', dest='all',
- help='edit all fields',
+ "--all",
+ action="store_true",
+ dest="all",
+ help="edit all fields",
)
edit_command.parser.add_album_option()
edit_command.func = self._edit_command
return [edit_command]
def _edit_command(self, lib, opts, args):
- """The CLI command function for the `beet edit` command.
- """
+ """The CLI command function for the `beet edit` command."""
# Get the objects to edit.
query = ui.decargs(args)
items, albums = _do_query(lib, query, opts.album, False)
objs = albums if opts.album else items
if not objs:
- ui.print_('Nothing to edit.')
+ ui.print_("Nothing to edit.")
return
# Get the fields to edit.
@@ -200,20 +196,19 @@ def _edit_command(self, lib, opts, args):
self.edit(opts.album, objs, fields)
def _get_fields(self, album, extra):
- """Get the set of fields to edit.
- """
+ """Get the set of fields to edit."""
# Start with the configured base fields.
if album:
- fields = self.config['albumfields'].as_str_seq()
+ fields = self.config["albumfields"].as_str_seq()
else:
- fields = self.config['itemfields'].as_str_seq()
+ fields = self.config["itemfields"].as_str_seq()
# Add the requested extra fields.
if extra:
fields += extra
# Ensure we always have the `id` field for identification.
- fields.append('id')
+ fields.append("id")
return set(fields)
@@ -242,8 +237,9 @@ def edit_objects(self, objs, fields):
old_data = [flatten(o, fields) for o in objs]
# Set up a temporary file with the initial data for editing.
- new = NamedTemporaryFile(mode='w', suffix='.yaml', delete=False,
- encoding='utf-8')
+ new = NamedTemporaryFile(
+ mode="w", suffix=".yaml", delete=False, encoding="utf-8"
+ )
old_str = dump(old_data)
new.write(old_str)
new.close()
@@ -256,7 +252,7 @@ def edit_objects(self, objs, fields):
# Read the data back after editing and check whether anything
# changed.
- with codecs.open(new.name, encoding='utf-8') as f:
+ with codecs.open(new.name, encoding="utf-8") as f:
new_str = f.read()
if new_str == old_str:
ui.print_("No changes; aborting.")
@@ -275,29 +271,29 @@ def edit_objects(self, objs, fields):
# Show the changes.
# If the objects are not on the DB yet, we need a copy of their
# original state for show_model_changes.
- objs_old = [obj.copy() if obj.id < 0 else None
- for obj in objs]
+ objs_old = [obj.copy() if obj.id < 0 else None for obj in objs]
self.apply_data(objs, old_data, new_data)
changed = False
for obj, obj_old in zip(objs, objs_old):
changed |= ui.show_model_changes(obj, obj_old)
if not changed:
- ui.print_('No changes to apply.')
+ ui.print_("No changes to apply.")
return False
# Confirm the changes.
choice = ui.input_options(
- ('continue Editing', 'apply', 'cancel')
+ ("continue Editing", "apply", "cancel")
)
- if choice == 'a': # Apply.
+ if choice == "a": # Apply.
return True
- elif choice == 'c': # Cancel.
+ elif choice == "c": # Cancel.
return False
- elif choice == 'e': # Keep editing.
+ elif choice == "e": # Keep editing.
# Reset the temporary changes to the objects. I we have a
# copy from above, use that, else reload from the database.
- objs = [(old_obj or obj)
- for old_obj, obj in zip(objs_old, objs)]
+ objs = [
+ (old_obj or obj) for old_obj, obj in zip(objs_old, objs)
+ ]
for obj in objs:
if not obj.id < 0:
obj.load()
@@ -315,33 +311,35 @@ def apply_data(self, objs, old_data, new_data):
are temporary.
"""
if len(old_data) != len(new_data):
- self._log.warning('number of objects changed from {} to {}',
- len(old_data), len(new_data))
+ self._log.warning(
+ "number of objects changed from {} to {}",
+ len(old_data),
+ len(new_data),
+ )
obj_by_id = {o.id: o for o in objs}
- ignore_fields = self.config['ignore_fields'].as_str_seq()
+ ignore_fields = self.config["ignore_fields"].as_str_seq()
for old_dict, new_dict in zip(old_data, new_data):
# Prohibit any changes to forbidden fields to avoid
# clobbering `id` and such by mistake.
forbidden = False
for key in ignore_fields:
if old_dict.get(key) != new_dict.get(key):
- self._log.warning('ignoring object whose {} changed', key)
+ self._log.warning("ignoring object whose {} changed", key)
forbidden = True
break
if forbidden:
continue
- id_ = int(old_dict['id'])
+ id_ = int(old_dict["id"])
apply_(obj_by_id[id_], new_dict)
def save_changes(self, objs):
- """Save a list of updated Model objects to the database.
- """
+ """Save a list of updated Model objects to the database."""
# Save to the database and possibly write tags.
for ob in objs:
if ob._dirty:
- self._log.debug('saving changes to {}', ob)
+ self._log.debug("saving changes to {}", ob)
ob.try_sync(ui.should_write(), ui.should_move())
# Methods for interactive importer execution.
@@ -350,10 +348,13 @@ def before_choose_candidate_listener(self, session, task):
"""Append an "Edit" choice and an "edit Candidates" choice (if
there are candidates) to the interactive importer prompt.
"""
- choices = [PromptChoice('d', 'eDit', self.importer_edit)]
+ choices = [PromptChoice("d", "eDit", self.importer_edit)]
if task.candidates:
- choices.append(PromptChoice('c', 'edit Candidates',
- self.importer_edit_candidate))
+ choices.append(
+ PromptChoice(
+ "c", "edit Candidates", self.importer_edit_candidate
+ )
+ )
return choices
diff --git a/beetsplug/embedart.py b/beetsplug/embedart.py
index aec2187ff9..740863bf16 100644
--- a/beetsplug/embedart.py
+++ b/beetsplug/embedart.py
@@ -34,11 +34,9 @@ def _confirm(objs, album):
`album` is a Boolean indicating whether these are albums (as opposed
to items).
"""
- noun = 'album' if album else 'file'
- prompt = 'Modify artwork for {} {}{} (Y/n)?'.format(
- len(objs),
- noun,
- 's' if len(objs) > 1 else ''
+ noun = "album" if album else "file"
+ prompt = "Modify artwork for {} {}{} (Y/n)?".format(
+ len(objs), noun, "s" if len(objs) > 1 else ""
)
# Show all the items or albums.
@@ -50,56 +48,72 @@ def _confirm(objs, album):
class EmbedCoverArtPlugin(BeetsPlugin):
- """Allows albumart to be embedded into the actual files.
- """
+ """Allows albumart to be embedded into the actual files."""
+
def __init__(self):
super().__init__()
- self.config.add({
- 'maxwidth': 0,
- 'auto': True,
- 'compare_threshold': 0,
- 'ifempty': False,
- 'remove_art_file': False,
- 'quality': 0,
- })
-
- if self.config['maxwidth'].get(int) and not ArtResizer.shared.local:
- self.config['maxwidth'] = 0
- self._log.warning("ImageMagick or PIL not found; "
- "'maxwidth' option ignored")
- if self.config['compare_threshold'].get(int) and not \
- ArtResizer.shared.can_compare:
- self.config['compare_threshold'] = 0
- self._log.warning("ImageMagick 6.8.7 or higher not installed; "
- "'compare_threshold' option ignored")
-
- self.register_listener('art_set', self.process_album)
+ self.config.add(
+ {
+ "maxwidth": 0,
+ "auto": True,
+ "compare_threshold": 0,
+ "ifempty": False,
+ "remove_art_file": False,
+ "quality": 0,
+ }
+ )
+
+ if self.config["maxwidth"].get(int) and not ArtResizer.shared.local:
+ self.config["maxwidth"] = 0
+ self._log.warning(
+ "ImageMagick or PIL not found; " "'maxwidth' option ignored"
+ )
+ if (
+ self.config["compare_threshold"].get(int)
+ and not ArtResizer.shared.can_compare
+ ):
+ self.config["compare_threshold"] = 0
+ self._log.warning(
+ "ImageMagick 6.8.7 or higher not installed; "
+ "'compare_threshold' option ignored"
+ )
+
+ self.register_listener("art_set", self.process_album)
def commands(self):
# Embed command.
- embed_cmd = ui.Subcommand('embedart',
- help='embed image files into file metadata')
- embed_cmd.parser.add_option('-f', '--file', metavar='PATH',
- help='the image file to embed')
+ embed_cmd = ui.Subcommand(
+ "embedart", help="embed image files into file metadata"
+ )
+ embed_cmd.parser.add_option(
+ "-f", "--file", metavar="PATH", help="the image file to embed"
+ )
- embed_cmd.parser.add_option("-y", "--yes", action="store_true",
- help="skip confirmation")
+ embed_cmd.parser.add_option(
+ "-y", "--yes", action="store_true", help="skip confirmation"
+ )
- embed_cmd.parser.add_option('-u', '--url', metavar='URL',
- help='the URL of the image file to embed')
+ embed_cmd.parser.add_option(
+ "-u",
+ "--url",
+ metavar="URL",
+ help="the URL of the image file to embed",
+ )
- maxwidth = self.config['maxwidth'].get(int)
- quality = self.config['quality'].get(int)
- compare_threshold = self.config['compare_threshold'].get(int)
- ifempty = self.config['ifempty'].get(bool)
+ maxwidth = self.config["maxwidth"].get(int)
+ quality = self.config["quality"].get(int)
+ compare_threshold = self.config["compare_threshold"].get(int)
+ ifempty = self.config["ifempty"].get(bool)
def embed_func(lib, opts, args):
if opts.file:
imagepath = normpath(opts.file)
if not os.path.isfile(syspath(imagepath)):
- raise ui.UserError('image file {} not found'.format(
- displayable_path(imagepath)
- ))
+ raise ui.UserError(
+ "image file {} not found".format(
+ displayable_path(imagepath)
+ )
+ )
items = lib.items(decargs(args))
@@ -108,9 +122,16 @@ def embed_func(lib, opts, args):
return
for item in items:
- art.embed_item(self._log, item, imagepath, maxwidth,
- None, compare_threshold, ifempty,
- quality=quality)
+ art.embed_item(
+ self._log,
+ item,
+ imagepath,
+ maxwidth,
+ None,
+ compare_threshold,
+ ifempty,
+ quality=quality,
+ )
elif opts.url:
try:
response = requests.get(opts.url, timeout=5)
@@ -118,15 +139,14 @@ def embed_func(lib, opts, args):
except requests.exceptions.RequestException as e:
self._log.error("{}".format(e))
return
- extension = guess_extension(response.headers
- ['Content-Type'])
+ extension = guess_extension(response.headers["Content-Type"])
if extension is None:
- self._log.error('Invalid image file')
+ self._log.error("Invalid image file")
return
- file = f'image{extension}'
+ file = f"image{extension}"
tempimg = os.path.join(tempfile.gettempdir(), file)
try:
- with open(tempimg, 'wb') as f:
+ with open(tempimg, "wb") as f:
f.write(response.content)
except Exception as e:
self._log.error("Unable to save image: {}".format(e))
@@ -137,9 +157,16 @@ def embed_func(lib, opts, args):
os.remove(tempimg)
return
for item in items:
- art.embed_item(self._log, item, tempimg, maxwidth,
- None, compare_threshold, ifempty,
- quality=quality)
+ art.embed_item(
+ self._log,
+ item,
+ tempimg,
+ maxwidth,
+ None,
+ compare_threshold,
+ ifempty,
+ quality=quality,
+ )
os.remove(tempimg)
else:
albums = lib.albums(decargs(args))
@@ -147,55 +174,70 @@ def embed_func(lib, opts, args):
if not opts.yes and not _confirm(albums, not opts.file):
return
for album in albums:
- art.embed_album(self._log, album, maxwidth,
- False, compare_threshold, ifempty,
- quality=quality)
+ art.embed_album(
+ self._log,
+ album,
+ maxwidth,
+ False,
+ compare_threshold,
+ ifempty,
+ quality=quality,
+ )
self.remove_artfile(album)
embed_cmd.func = embed_func
# Extract command.
extract_cmd = ui.Subcommand(
- 'extractart',
- help='extract an image from file metadata',
+ "extractart",
+ help="extract an image from file metadata",
)
extract_cmd.parser.add_option(
- '-o', dest='outpath',
- help='image output file',
+ "-o",
+ dest="outpath",
+ help="image output file",
)
extract_cmd.parser.add_option(
- '-n', dest='filename',
- help='image filename to create for all matched albums',
+ "-n",
+ dest="filename",
+ help="image filename to create for all matched albums",
)
extract_cmd.parser.add_option(
- '-a', dest='associate', action='store_true',
- help='associate the extracted images with the album',
+ "-a",
+ dest="associate",
+ action="store_true",
+ help="associate the extracted images with the album",
)
def extract_func(lib, opts, args):
if opts.outpath:
- art.extract_first(self._log, normpath(opts.outpath),
- lib.items(decargs(args)))
+ art.extract_first(
+ self._log, normpath(opts.outpath), lib.items(decargs(args))
+ )
else:
- filename = bytestring_path(opts.filename or
- config['art_filename'].get())
- if os.path.dirname(filename) != b'':
+ filename = bytestring_path(
+ opts.filename or config["art_filename"].get()
+ )
+ if os.path.dirname(filename) != b"":
self._log.error(
- "Only specify a name rather than a path for -n")
+ "Only specify a name rather than a path for -n"
+ )
return
for album in lib.albums(decargs(args)):
artpath = normpath(os.path.join(album.path, filename))
- artpath = art.extract_first(self._log, artpath,
- album.items())
+ artpath = art.extract_first(
+ self._log, artpath, album.items()
+ )
if artpath and opts.associate:
album.set_art(artpath)
album.store()
+
extract_cmd.func = extract_func
# Clear command.
clear_cmd = ui.Subcommand(
- 'clearart',
- help='remove images from file metadata',
+ "clearart",
+ help="remove images from file metadata",
)
clear_cmd.parser.add_option(
"-y", "--yes", action="store_true", help="skip confirmation"
@@ -207,27 +249,32 @@ def clear_func(lib, opts, args):
if not opts.yes and not _confirm(items, False):
return
art.clear(self._log, lib, decargs(args))
+
clear_cmd.func = clear_func
return [embed_cmd, extract_cmd, clear_cmd]
def process_album(self, album):
- """Automatically embed art after art has been set
- """
- if self.config['auto'] and ui.should_write():
- max_width = self.config['maxwidth'].get(int)
- art.embed_album(self._log, album, max_width, True,
- self.config['compare_threshold'].get(int),
- self.config['ifempty'].get(bool))
+ """Automatically embed art after art has been set"""
+ if self.config["auto"] and ui.should_write():
+ max_width = self.config["maxwidth"].get(int)
+ art.embed_album(
+ self._log,
+ album,
+ max_width,
+ True,
+ self.config["compare_threshold"].get(int),
+ self.config["ifempty"].get(bool),
+ )
self.remove_artfile(album)
def remove_artfile(self, album):
"""Possibly delete the album art file for an album (if the
appropriate configuration option is enabled).
"""
- if self.config['remove_art_file'] and album.artpath:
+ if self.config["remove_art_file"] and album.artpath:
if os.path.isfile(syspath(album.artpath)):
- self._log.debug('Removing album art file for {0}', album)
+ self._log.debug("Removing album art file for {0}", album)
os.remove(syspath(album.artpath))
album.artpath = None
album.store()
diff --git a/beetsplug/embyupdate.py b/beetsplug/embyupdate.py
index a01883fed1..c885ff760b 100644
--- a/beetsplug/embyupdate.py
+++ b/beetsplug/embyupdate.py
@@ -9,9 +9,10 @@
"""
import hashlib
+from urllib.parse import parse_qs, urlencode, urljoin, urlsplit, urlunsplit
+
import requests
-from urllib.parse import urlencode, urljoin, parse_qs, urlsplit, urlunsplit
from beets import config
from beets.plugins import BeetsPlugin
@@ -32,24 +33,20 @@ def api_url(host, port, endpoint):
"""
# check if http or https is defined as host and create hostname
hostname_list = [host]
- if host.startswith('http://') or host.startswith('https://'):
- hostname = ''.join(hostname_list)
+ if host.startswith("http://") or host.startswith("https://"):
+ hostname = "".join(hostname_list)
else:
- hostname_list.insert(0, 'http://')
- hostname = ''.join(hostname_list)
+ hostname_list.insert(0, "http://")
+ hostname = "".join(hostname_list)
joined = urljoin(
- '{hostname}:{port}'.format(
- hostname=hostname,
- port=port
- ),
- endpoint
+ "{hostname}:{port}".format(hostname=hostname, port=port), endpoint
)
scheme, netloc, path, query_string, fragment = urlsplit(joined)
query_params = parse_qs(query_string)
- query_params['format'] = ['json']
+ query_params["format"] = ["json"]
new_query_string = urlencode(query_params, doseq=True)
return urlunsplit((scheme, netloc, path, new_query_string, fragment))
@@ -66,9 +63,9 @@ def password_data(username, password):
:rtype: dict
"""
return {
- 'username': username,
- 'password': hashlib.sha1(password.encode('utf-8')).hexdigest(),
- 'passwordMd5': hashlib.md5(password.encode('utf-8')).hexdigest()
+ "username": username,
+ "password": hashlib.sha1(password.encode("utf-8")).hexdigest(),
+ "passwordMd5": hashlib.md5(password.encode("utf-8")).hexdigest(),
}
@@ -92,10 +89,10 @@ def create_headers(user_id, token=None):
'Version="0.0.0"'
).format(user_id=user_id)
- headers['x-emby-authorization'] = authorization
+ headers["x-emby-authorization"] = authorization
if token:
- headers['x-mediabrowser-token'] = token
+ headers["x-mediabrowser-token"] = token
return headers
@@ -114,10 +111,10 @@ def get_token(host, port, headers, auth_data):
:returns: Access Token
:rtype: str
"""
- url = api_url(host, port, '/Users/AuthenticateByName')
+ url = api_url(host, port, "/Users/AuthenticateByName")
r = requests.post(url, headers=headers, data=auth_data)
- return r.json().get('AccessToken')
+ return r.json().get("AccessToken")
def get_user(host, port, username):
@@ -132,9 +129,9 @@ def get_user(host, port, username):
:returns: Matched Users
:rtype: list
"""
- url = api_url(host, port, '/Users/Public')
+ url = api_url(host, port, "/Users/Public")
r = requests.get(url)
- user = [i for i in r.json() if i['Name'] == username]
+ user = [i for i in r.json() if i["Name"] == username]
return user
@@ -144,44 +141,44 @@ def __init__(self):
super().__init__()
# Adding defaults.
- config['emby'].add({
- 'host': 'http://localhost',
- 'port': 8096,
- 'apikey': None,
- 'password': None,
- })
+ config["emby"].add(
+ {
+ "host": "http://localhost",
+ "port": 8096,
+ "apikey": None,
+ "password": None,
+ }
+ )
- self.register_listener('database_change', self.listen_for_db_change)
+ self.register_listener("database_change", self.listen_for_db_change)
def listen_for_db_change(self, lib, model):
- """Listens for beets db change and register the update for the end.
- """
- self.register_listener('cli_exit', self.update)
+ """Listens for beets db change and register the update for the end."""
+ self.register_listener("cli_exit", self.update)
def update(self, lib):
- """When the client exists try to send refresh request to Emby.
- """
- self._log.info('Updating Emby library...')
+ """When the client exists try to send refresh request to Emby."""
+ self._log.info("Updating Emby library...")
- host = config['emby']['host'].get()
- port = config['emby']['port'].get()
- username = config['emby']['username'].get()
- password = config['emby']['password'].get()
- userid = config['emby']['userid'].get()
- token = config['emby']['apikey'].get()
+ host = config["emby"]["host"].get()
+ port = config["emby"]["port"].get()
+ username = config["emby"]["username"].get()
+ password = config["emby"]["password"].get()
+ userid = config["emby"]["userid"].get()
+ token = config["emby"]["apikey"].get()
# Check if at least a apikey or password is given.
if not any([password, token]):
- self._log.warning('Provide at least Emby password or apikey.')
+ self._log.warning("Provide at least Emby password or apikey.")
return
if not userid:
# Get user information from the Emby API.
user = get_user(host, port, username)
if not user:
- self._log.warning(f'User {username} could not be found.')
+ self._log.warning(f"User {username} could not be found.")
return
- userid = user[0]['Id']
+ userid = user[0]["Id"]
if not token:
# Create Authentication data and headers.
@@ -191,18 +188,16 @@ def update(self, lib):
# Get authentication token.
token = get_token(host, port, headers, auth_data)
if not token:
- self._log.warning(
- 'Could not get token for user {0}', username
- )
+ self._log.warning("Could not get token for user {0}", username)
return
# Recreate headers with a token.
headers = create_headers(userid, token=token)
# Trigger the Update.
- url = api_url(host, port, '/Library/Refresh')
+ url = api_url(host, port, "/Library/Refresh")
r = requests.post(url, headers=headers)
if r.status_code != 204:
- self._log.warning('Update could not be triggered')
+ self._log.warning("Update could not be triggered")
else:
- self._log.info('Update triggered.')
+ self._log.info("Update triggered.")
diff --git a/beetsplug/export.py b/beetsplug/export.py
index 601901e679..825d63be3d 100644
--- a/beetsplug/export.py
+++ b/beetsplug/export.py
@@ -15,22 +15,23 @@
"""
-import sys
import codecs
-import json
import csv
+import json
+import sys
+from datetime import date, datetime
from xml.etree import ElementTree
-from datetime import datetime, date
-from beets.plugins import BeetsPlugin
-from beets import ui
-from beets import util
import mediafile
+
+from beets import ui, util
+from beets.plugins import BeetsPlugin
from beetsplug.info import library_data, tag_data
class ExportEncoder(json.JSONEncoder):
"""Deals with dates because JSON doesn't have a standard"""
+
def default(self, o):
if isinstance(o, (datetime, date)):
return o.isoformat()
@@ -38,89 +39,99 @@ def default(self, o):
class ExportPlugin(BeetsPlugin):
-
def __init__(self):
super().__init__()
- self.config.add({
- 'default_format': 'json',
- 'json': {
- # JSON module formatting options.
- 'formatting': {
- 'ensure_ascii': False,
- 'indent': 4,
- 'separators': (',', ': '),
- 'sort_keys': True
+ self.config.add(
+ {
+ "default_format": "json",
+ "json": {
+ # JSON module formatting options.
+ "formatting": {
+ "ensure_ascii": False,
+ "indent": 4,
+ "separators": (",", ": "),
+ "sort_keys": True,
+ }
+ },
+ "jsonlines": {
+ # JSON Lines formatting options.
+ "formatting": {
+ "ensure_ascii": False,
+ "separators": (",", ": "),
+ "sort_keys": True,
+ }
+ },
+ "csv": {
+ # CSV module formatting options.
+ "formatting": {
+ # The delimiter used to separate columns.
+ "delimiter": ",",
+ # The dialect to use when formatting the file output.
+ "dialect": "excel",
+ }
+ },
+ "xml": {
+ # XML module formatting options.
+ "formatting": {}
}
- },
- 'jsonlines': {
- # JSON Lines formatting options.
- 'formatting': {
- 'ensure_ascii': False,
- 'separators': (',', ': '),
- 'sort_keys': True
- }
- },
- 'csv': {
- # CSV module formatting options.
- 'formatting': {
- # The delimiter used to separate columns.
- 'delimiter': ',',
- # The dialect to use when formatting the file output.
- 'dialect': 'excel'
- }
- },
- 'xml': {
- # XML module formatting options.
- 'formatting': {}
+ # TODO: Use something like the edit plugin
+ # 'item_fields': []
}
- # TODO: Use something like the edit plugin
- # 'item_fields': []
- })
+ )
def commands(self):
- cmd = ui.Subcommand('export', help='export data from beets')
+ cmd = ui.Subcommand("export", help="export data from beets")
cmd.func = self.run
cmd.parser.add_option(
- '-l', '--library', action='store_true',
- help='show library fields instead of tags',
+ "-l",
+ "--library",
+ action="store_true",
+ help="show library fields instead of tags",
)
cmd.parser.add_option(
- '-a', '--album', action='store_true',
+ "-a",
+ "--album",
+ action="store_true",
help='show album fields instead of tracks (implies "--library")',
)
cmd.parser.add_option(
- '--append', action='store_true', default=False,
- help='if should append data to the file',
+ "--append",
+ action="store_true",
+ default=False,
+ help="if should append data to the file",
)
cmd.parser.add_option(
- '-i', '--include-keys', default=[],
- action='append', dest='included_keys',
- help='comma separated list of keys to show',
+ "-i",
+ "--include-keys",
+ default=[],
+ action="append",
+ dest="included_keys",
+ help="comma separated list of keys to show",
)
cmd.parser.add_option(
- '-o', '--output',
- help='path for the output file. If not given, will print the data'
+ "-o",
+ "--output",
+ help="path for the output file. If not given, will print the data",
)
cmd.parser.add_option(
- '-f', '--format', default='json',
- help="the output format: json (default), jsonlines, csv, or xml"
+ "-f",
+ "--format",
+ default="json",
+ help="the output format: json (default), jsonlines, csv, or xml",
)
return [cmd]
def run(self, lib, opts, args):
file_path = opts.output
- file_mode = 'a' if opts.append else 'w'
- file_format = opts.format or self.config['default_format'].get(str)
- file_format_is_line_based = (file_format == 'jsonlines')
- format_options = self.config[file_format]['formatting'].get(dict)
+ file_mode = "a" if opts.append else "w"
+ file_format = opts.format or self.config["default_format"].get(str)
+ file_format_is_line_based = file_format == "jsonlines"
+ format_options = self.config[file_format]["formatting"].get(dict)
export_format = ExportFormat.factory(
file_type=file_format,
- **{
- 'file_path': file_path,
- 'file_mode': file_mode
- }
+ **{"file_path": file_path, "file_mode": file_mode},
)
if opts.library or opts.album:
@@ -130,17 +141,18 @@ def run(self, lib, opts, args):
included_keys = []
for keys in opts.included_keys:
- included_keys.extend(keys.split(','))
+ included_keys.extend(keys.split(","))
items = []
for data_emitter in data_collector(
- lib, ui.decargs(args),
- album=opts.album,
+ lib,
+ ui.decargs(args),
+ album=opts.album,
):
try:
- data, item = data_emitter(included_keys or '*')
+ data, item = data_emitter(included_keys or "*")
except (mediafile.UnreadableFileError, OSError) as ex:
- self._log.error('cannot read file: {0}', ex)
+ self._log.error("cannot read file: {0}", ex)
continue
for key, value in data.items():
@@ -158,13 +170,17 @@ def run(self, lib, opts, args):
class ExportFormat:
"""The output format type"""
- def __init__(self, file_path, file_mode='w', encoding='utf-8'):
+
+ def __init__(self, file_path, file_mode="w", encoding="utf-8"):
self.path = file_path
self.mode = file_mode
self.encoding = encoding
# creates a file object to write/append or sets to stdout
- self.out_stream = codecs.open(self.path, self.mode, self.encoding) \
- if self.path else sys.stdout
+ self.out_stream = (
+ codecs.open(self.path, self.mode, self.encoding)
+ if self.path
+ else sys.stdout
+ )
@classmethod
def factory(cls, file_type, **kwargs):
@@ -183,17 +199,19 @@ def export(self, data, **kwargs):
class JsonFormat(ExportFormat):
"""Saves in a json file"""
- def __init__(self, file_path, file_mode='w', encoding='utf-8'):
+
+ def __init__(self, file_path, file_mode="w", encoding="utf-8"):
super().__init__(file_path, file_mode, encoding)
def export(self, data, **kwargs):
json.dump(data, self.out_stream, cls=ExportEncoder, **kwargs)
- self.out_stream.write('\n')
+ self.out_stream.write("\n")
class CSVFormat(ExportFormat):
"""Saves in a csv file"""
- def __init__(self, file_path, file_mode='w', encoding='utf-8'):
+
+ def __init__(self, file_path, file_mode="w", encoding="utf-8"):
super().__init__(file_path, file_mode, encoding)
def export(self, data, **kwargs):
@@ -205,23 +223,24 @@ def export(self, data, **kwargs):
class XMLFormat(ExportFormat):
"""Saves in a xml file"""
- def __init__(self, file_path, file_mode='w', encoding='utf-8'):
+
+ def __init__(self, file_path, file_mode="w", encoding="utf-8"):
super().__init__(file_path, file_mode, encoding)
def export(self, data, **kwargs):
# Creates the XML file structure.
- library = ElementTree.Element('library')
- tracks = ElementTree.SubElement(library, 'tracks')
+ library = ElementTree.Element("library")
+ tracks = ElementTree.SubElement(library, "tracks")
if data and isinstance(data[0], dict):
for index, item in enumerate(data):
- track = ElementTree.SubElement(tracks, 'track')
+ track = ElementTree.SubElement(tracks, "track")
for key, value in item.items():
track_details = ElementTree.SubElement(track, key)
track_details.text = value
# Depending on the version of python the encoding needs to change
try:
- data = ElementTree.tostring(library, encoding='unicode', **kwargs)
+ data = ElementTree.tostring(library, encoding="unicode", **kwargs)
except LookupError:
- data = ElementTree.tostring(library, encoding='utf-8', **kwargs)
+ data = ElementTree.tostring(library, encoding="utf-8", **kwargs)
self.out_stream.write(data)
diff --git a/beetsplug/fetchart.py b/beetsplug/fetchart.py
index efa7077b2f..f1b012a5f2 100644
--- a/beetsplug/fetchart.py
+++ b/beetsplug/fetchart.py
@@ -23,22 +23,21 @@
import confuse
import requests
+from mediafile import image_mime_type
+
from beets import config, importer, plugins, ui, util
from beets.util import bytestring_path, py3_path, sorted_walk, syspath
from beets.util.artresizer import ArtResizer
-from mediafile import image_mime_type
try:
from bs4 import BeautifulSoup
+
HAS_BEAUTIFUL_SOUP = True
except ImportError:
HAS_BEAUTIFUL_SOUP = False
-CONTENT_TYPES = {
- 'image/jpeg': [b'jpg', b'jpeg'],
- 'image/png': [b'png']
-}
+CONTENT_TYPES = {"image/jpeg": [b"jpg", b"jpeg"], "image/png": [b"png"]}
IMAGE_EXTENSIONS = [ext for exts in CONTENT_TYPES.values() for ext in exts]
@@ -46,6 +45,7 @@ class Candidate:
"""Holds information about a matching artwork, deals with validation of
dimension restrictions and resizing.
"""
+
CANDIDATE_BAD = 0
CANDIDATE_EXACT = 1
CANDIDATE_DOWNSCALE = 2
@@ -56,8 +56,9 @@ class Candidate:
MATCH_EXACT = 0
MATCH_FALLBACK = 1
- def __init__(self, log, path=None, url=None, source='',
- match=None, size=None):
+ def __init__(
+ self, log, path=None, url=None, source="", match=None, size=None
+ ):
self._log = log
self.path = path
self.url = url
@@ -81,22 +82,29 @@ def _validate(self, plugin):
if not self.path:
return self.CANDIDATE_BAD
- if (not (plugin.enforce_ratio or plugin.minwidth or plugin.maxwidth
- or plugin.max_filesize or plugin.deinterlace
- or plugin.cover_format)):
+ if not (
+ plugin.enforce_ratio
+ or plugin.minwidth
+ or plugin.maxwidth
+ or plugin.max_filesize
+ or plugin.deinterlace
+ or plugin.cover_format
+ ):
return self.CANDIDATE_EXACT
# get_size returns None if no local imaging backend is available
if not self.size:
self.size = ArtResizer.shared.get_size(self.path)
- self._log.debug('image size: {}', self.size)
+ self._log.debug("image size: {}", self.size)
if not self.size:
- self._log.warning('Could not get size of image (please see '
- 'documentation for dependencies). '
- 'The configuration options `minwidth`, '
- '`enforce_ratio` and `max_filesize` '
- 'may be violated.')
+ self._log.warning(
+ "Could not get size of image (please see "
+ "documentation for dependencies). "
+ "The configuration options `minwidth`, "
+ "`enforce_ratio` and `max_filesize` "
+ "may be violated."
+ )
return self.CANDIDATE_EXACT
short_edge = min(self.size)
@@ -104,8 +112,9 @@ def _validate(self, plugin):
# Check minimum dimension.
if plugin.minwidth and self.size[0] < plugin.minwidth:
- self._log.debug('image too small ({} < {})',
- self.size[0], plugin.minwidth)
+ self._log.debug(
+ "image too small ({} < {})", self.size[0], plugin.minwidth
+ )
return self.CANDIDATE_BAD
# Check aspect ratio.
@@ -113,28 +122,38 @@ def _validate(self, plugin):
if plugin.enforce_ratio:
if plugin.margin_px:
if edge_diff > plugin.margin_px:
- self._log.debug('image is not close enough to being '
- 'square, ({} - {} > {})',
- long_edge, short_edge, plugin.margin_px)
+ self._log.debug(
+ "image is not close enough to being "
+ "square, ({} - {} > {})",
+ long_edge,
+ short_edge,
+ plugin.margin_px,
+ )
return self.CANDIDATE_BAD
elif plugin.margin_percent:
margin_px = plugin.margin_percent * long_edge
if edge_diff > margin_px:
- self._log.debug('image is not close enough to being '
- 'square, ({} - {} > {})',
- long_edge, short_edge, margin_px)
+ self._log.debug(
+ "image is not close enough to being "
+ "square, ({} - {} > {})",
+ long_edge,
+ short_edge,
+ margin_px,
+ )
return self.CANDIDATE_BAD
elif edge_diff:
# also reached for margin_px == 0 and margin_percent == 0.0
- self._log.debug('image is not square ({} != {})',
- self.size[0], self.size[1])
+ self._log.debug(
+ "image is not square ({} != {})", self.size[0], self.size[1]
+ )
return self.CANDIDATE_BAD
# Check maximum dimension.
downscale = False
if plugin.maxwidth and self.size[0] > plugin.maxwidth:
- self._log.debug('image needs rescaling ({} > {})',
- self.size[0], plugin.maxwidth)
+ self._log.debug(
+ "image needs rescaling ({} > {})", self.size[0], plugin.maxwidth
+ )
downscale = True
# Check filesize.
@@ -142,8 +161,11 @@ def _validate(self, plugin):
if plugin.max_filesize:
filesize = os.stat(syspath(self.path)).st_size
if filesize > plugin.max_filesize:
- self._log.debug('image needs resizing ({}B > {}B)',
- filesize, plugin.max_filesize)
+ self._log.debug(
+ "image needs resizing ({}B > {}B)",
+ filesize,
+ plugin.max_filesize,
+ )
downsize = True
# Check image format
@@ -152,8 +174,11 @@ def _validate(self, plugin):
fmt = ArtResizer.shared.get_format(self.path)
reformat = fmt != plugin.cover_format
if reformat:
- self._log.debug('image needs reformatting: {} -> {}',
- fmt, plugin.cover_format)
+ self._log.debug(
+ "image needs reformatting: {} -> {}",
+ fmt,
+ plugin.cover_format,
+ )
if downscale:
return self.CANDIDATE_DOWNSCALE
@@ -172,23 +197,27 @@ def validate(self, plugin):
def resize(self, plugin):
if self.check == self.CANDIDATE_DOWNSCALE:
- self.path = \
- ArtResizer.shared.resize(plugin.maxwidth, self.path,
- quality=plugin.quality,
- max_filesize=plugin.max_filesize)
+ self.path = ArtResizer.shared.resize(
+ plugin.maxwidth,
+ self.path,
+ quality=plugin.quality,
+ max_filesize=plugin.max_filesize,
+ )
elif self.check == self.CANDIDATE_DOWNSIZE:
# dimensions are correct, so maxwidth is set to maximum dimension
- self.path = \
- ArtResizer.shared.resize(max(self.size), self.path,
- quality=plugin.quality,
- max_filesize=plugin.max_filesize)
+ self.path = ArtResizer.shared.resize(
+ max(self.size),
+ self.path,
+ quality=plugin.quality,
+ max_filesize=plugin.max_filesize,
+ )
elif self.check == self.CANDIDATE_DEINTERLACE:
self.path = ArtResizer.shared.deinterlace(self.path)
elif self.check == self.CANDIDATE_REFORMAT:
self.path = ArtResizer.shared.reformat(
- self.path,
- plugin.cover_format,
- deinterlaced=plugin.deinterlace,
+ self.path,
+ plugin.cover_format,
+ deinterlaced=plugin.deinterlace,
)
@@ -207,26 +236,26 @@ def _logged_get(log, *args, **kwargs):
# `requests.Session.request`.
req_kwargs = kwargs
send_kwargs = {}
- for arg in ('stream', 'verify', 'proxies', 'cert', 'timeout'):
+ for arg in ("stream", "verify", "proxies", "cert", "timeout"):
if arg in kwargs:
send_kwargs[arg] = req_kwargs.pop(arg)
# Our special logging message parameter.
- if 'message' in kwargs:
- message = kwargs.pop('message')
+ if "message" in kwargs:
+ message = kwargs.pop("message")
else:
- message = 'getting URL'
+ message = "getting URL"
- req = requests.Request('GET', *args, **req_kwargs)
+ req = requests.Request("GET", *args, **req_kwargs)
with requests.Session() as s:
- s.headers = {'User-Agent': 'beets'}
+ s.headers = {"User-Agent": "beets"}
prepped = s.prepare_request(req)
settings = s.merge_environment_settings(
prepped.url, {}, None, None, None
)
send_kwargs.update(settings)
- log.debug('{}: {}', message, prepped.url)
+ log.debug("{}: {}", message, prepped.url)
return s.send(prepped, **send_kwargs)
@@ -245,8 +274,9 @@ def request(self, *args, **kwargs):
# ART SOURCES ################################################################
+
class ArtSource(RequestMixin):
- VALID_MATCHING_CRITERIA = ['default']
+ VALID_MATCHING_CRITERIA = ["default"]
def __init__(self, log, config, match_by=None):
self._log = log
@@ -279,7 +309,7 @@ def cleanup(self, candidate):
class LocalArtSource(ArtSource):
IS_LOCAL = True
- LOC_STR = 'local'
+ LOC_STR = "local"
def fetch_image(self, candidate, plugin):
pass
@@ -287,7 +317,7 @@ def fetch_image(self, candidate, plugin):
class RemoteArtSource(ArtSource):
IS_LOCAL = False
- LOC_STR = 'remote'
+ LOC_STR = "remote"
def fetch_image(self, candidate, plugin):
"""Downloads an image from a URL and checks whether it seems to
@@ -295,12 +325,16 @@ def fetch_image(self, candidate, plugin):
Otherwise, returns None.
"""
if plugin.maxwidth:
- candidate.url = ArtResizer.shared.proxy_url(plugin.maxwidth,
- candidate.url)
+ candidate.url = ArtResizer.shared.proxy_url(
+ plugin.maxwidth, candidate.url
+ )
try:
- with closing(self.request(candidate.url, stream=True,
- message='downloading image')) as resp:
- ct = resp.headers.get('Content-Type', None)
+ with closing(
+ self.request(
+ candidate.url, stream=True, message="downloading image"
+ )
+ ) as resp:
+ ct = resp.headers.get("Content-Type", None)
# Download the image to a temporary file. As some servers
# (notably fanart.tv) have proven to return wrong Content-Types
@@ -308,7 +342,7 @@ def fetch_image(self, candidate, plugin):
# rely on it. Instead validate the type using the file magic
# and only then determine the extension.
data = resp.iter_content(chunk_size=1024)
- header = b''
+ header = b""
for chunk in data:
header += chunk
if len(header) >= 32:
@@ -327,17 +361,23 @@ def fetch_image(self, candidate, plugin):
real_ct = ct
if real_ct not in CONTENT_TYPES:
- self._log.debug('not a supported image: {}',
- real_ct or 'unknown content type')
+ self._log.debug(
+ "not a supported image: {}",
+ real_ct or "unknown content type",
+ )
return
- ext = b'.' + CONTENT_TYPES[real_ct][0]
+ ext = b"." + CONTENT_TYPES[real_ct][0]
if real_ct != ct:
- self._log.warning('Server specified {}, but returned a '
- '{} image. Correcting the extension '
- 'to {}',
- ct, real_ct, ext)
+ self._log.warning(
+ "Server specified {}, but returned a "
+ "{} image. Correcting the extension "
+ "to {}",
+ ct,
+ real_ct,
+ ext,
+ )
suffix = py3_path(ext)
with NamedTemporaryFile(suffix=suffix, delete=False) as fh:
@@ -346,15 +386,16 @@ def fetch_image(self, candidate, plugin):
# download the remaining part of the image
for chunk in data:
fh.write(chunk)
- self._log.debug('downloaded art to: {0}',
- util.displayable_path(fh.name))
+ self._log.debug(
+ "downloaded art to: {0}", util.displayable_path(fh.name)
+ )
candidate.path = util.bytestring_path(fh.name)
return
except (OSError, requests.RequestException, TypeError) as exc:
# Handling TypeError works around a urllib3 bug:
# https://github.com/shazow/urllib3/issues/556
- self._log.debug('error fetching art: {}', exc)
+ self._log.debug("error fetching art: {}", exc)
return
def cleanup(self, candidate):
@@ -362,16 +403,16 @@ def cleanup(self, candidate):
try:
util.remove(path=candidate.path)
except util.FilesystemError as exc:
- self._log.debug('error cleaning up tmp art: {}', exc)
+ self._log.debug("error cleaning up tmp art: {}", exc)
class CoverArtArchive(RemoteArtSource):
NAME = "Cover Art Archive"
- VALID_MATCHING_CRITERIA = ['release', 'releasegroup']
+ VALID_MATCHING_CRITERIA = ["release", "releasegroup"]
VALID_THUMBNAIL_SIZES = [250, 500, 1200]
- URL = 'https://coverartarchive.org/release/{mbid}'
- GROUP_URL = 'https://coverartarchive.org/release-group/{mbid}'
+ URL = "https://coverartarchive.org/release/{mbid}"
+ GROUP_URL = "https://coverartarchive.org/release-group/{mbid}"
def get(self, album, plugin, paths):
"""Return the Cover Art Archive and Cover Art Archive release
@@ -383,20 +424,24 @@ def get_image_urls(url, preferred_width=None):
try:
response = self.request(url)
except requests.RequestException:
- self._log.debug('{}: error receiving response'
- .format(self.NAME))
+ self._log.debug(
+ "{}: error receiving response".format(self.NAME)
+ )
return
try:
data = response.json()
except ValueError:
- self._log.debug('{}: error loading response: {}'
- .format(self.NAME, response.text))
+ self._log.debug(
+ "{}: error loading response: {}".format(
+ self.NAME, response.text
+ )
+ )
return
- for item in data.get('images', []):
+ for item in data.get("images", []):
try:
- if 'Front' not in item['types']:
+ if "Front" not in item["types"]:
continue
# If there is a pre-sized thumbnail of the desired size
@@ -422,45 +467,45 @@ def get_image_urls(url, preferred_width=None):
if plugin.maxwidth in self.VALID_THUMBNAIL_SIZES:
preferred_width = str(plugin.maxwidth)
- if 'release' in self.match_by and album.mb_albumid:
+ if "release" in self.match_by and album.mb_albumid:
for url in get_image_urls(release_url, preferred_width):
yield self._candidate(url=url, match=Candidate.MATCH_EXACT)
- if 'releasegroup' in self.match_by and album.mb_releasegroupid:
+ if "releasegroup" in self.match_by and album.mb_releasegroupid:
for url in get_image_urls(release_group_url, preferred_width):
yield self._candidate(url=url, match=Candidate.MATCH_FALLBACK)
class Amazon(RemoteArtSource):
NAME = "Amazon"
- URL = 'https://images.amazon.com/images/P/%s.%02i.LZZZZZZZ.jpg'
+ URL = "https://images.amazon.com/images/P/%s.%02i.LZZZZZZZ.jpg"
INDICES = (1, 2)
def get(self, album, plugin, paths):
- """Generate URLs using Amazon ID (ASIN) string.
- """
+ """Generate URLs using Amazon ID (ASIN) string."""
if album.asin:
for index in self.INDICES:
- yield self._candidate(url=self.URL % (album.asin, index),
- match=Candidate.MATCH_EXACT)
+ yield self._candidate(
+ url=self.URL % (album.asin, index),
+ match=Candidate.MATCH_EXACT,
+ )
class AlbumArtOrg(RemoteArtSource):
NAME = "AlbumArt.org scraper"
- URL = 'https://www.albumart.org/index_detail.php'
+ URL = "https://www.albumart.org/index_detail.php"
PAT = r'href\s*=\s*"([^>"]*)"[^>]*title\s*=\s*"View larger image"'
def get(self, album, plugin, paths):
- """Return art URL from AlbumArt.org using album ASIN.
- """
+ """Return art URL from AlbumArt.org using album ASIN."""
if not album.asin:
return
# Get the page from albumart.org.
try:
- resp = self.request(self.URL, params={'asin': album.asin})
- self._log.debug('scraped art URL: {0}', resp.url)
+ resp = self.request(self.URL, params={"asin": album.asin})
+ self._log.debug("scraped art URL: {0}", resp.url)
except requests.RequestException:
- self._log.debug('error scraping art page')
+ self._log.debug("error scraping art page")
return
# Search the page for the image URL.
@@ -469,29 +514,31 @@ def get(self, album, plugin, paths):
image_url = m.group(1)
yield self._candidate(url=image_url, match=Candidate.MATCH_EXACT)
else:
- self._log.debug('no image found on page')
+ self._log.debug("no image found on page")
class GoogleImages(RemoteArtSource):
NAME = "Google Images"
- URL = 'https://www.googleapis.com/customsearch/v1'
+ URL = "https://www.googleapis.com/customsearch/v1"
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
- self.key = self._config['google_key'].get(),
- self.cx = self._config['google_engine'].get(),
+ self.key = (self._config["google_key"].get(),)
+ self.cx = (self._config["google_engine"].get(),)
@staticmethod
def add_default_config(config):
- config.add({
- 'google_key': None,
- 'google_engine': '001442825323518660753:hrh5ch1gjzm',
- })
- config['google_key'].redact = True
+ config.add(
+ {
+ "google_key": None,
+ "google_engine": "001442825323518660753:hrh5ch1gjzm",
+ }
+ )
+ config["google_key"].redact = True
@classmethod
def available(cls, log, config):
- has_key = bool(config['google_key'].get())
+ has_key = bool(config["google_key"].get())
if not has_key:
log.debug("google: Disabling art source due to missing key")
return has_key
@@ -502,55 +549,63 @@ def get(self, album, plugin, paths):
"""
if not (album.albumartist and album.album):
return
- search_string = (album.albumartist + ',' + album.album).encode('utf-8')
+ search_string = (album.albumartist + "," + album.album).encode("utf-8")
try:
- response = self.request(self.URL, params={
- 'key': self.key,
- 'cx': self.cx,
- 'q': search_string,
- 'searchType': 'image'
- })
+ response = self.request(
+ self.URL,
+ params={
+ "key": self.key,
+ "cx": self.cx,
+ "q": search_string,
+ "searchType": "image",
+ },
+ )
except requests.RequestException:
- self._log.debug('google: error receiving response')
+ self._log.debug("google: error receiving response")
return
# Get results using JSON.
try:
data = response.json()
except ValueError:
- self._log.debug('google: error loading response: {}'
- .format(response.text))
+ self._log.debug(
+ "google: error loading response: {}".format(response.text)
+ )
return
- if 'error' in data:
- reason = data['error']['errors'][0]['reason']
- self._log.debug('google fetchart error: {0}', reason)
+ if "error" in data:
+ reason = data["error"]["errors"][0]["reason"]
+ self._log.debug("google fetchart error: {0}", reason)
return
- if 'items' in data.keys():
- for item in data['items']:
- yield self._candidate(url=item['link'],
- match=Candidate.MATCH_EXACT)
+ if "items" in data.keys():
+ for item in data["items"]:
+ yield self._candidate(
+ url=item["link"], match=Candidate.MATCH_EXACT
+ )
class FanartTV(RemoteArtSource):
"""Art from fanart.tv requested using their API"""
+
NAME = "fanart.tv"
- API_URL = 'https://webservice.fanart.tv/v3/'
- API_ALBUMS = API_URL + 'music/albums/'
- PROJECT_KEY = '61a7d0ab4e67162b7a0c7c35915cd48e'
+ API_URL = "https://webservice.fanart.tv/v3/"
+ API_ALBUMS = API_URL + "music/albums/"
+ PROJECT_KEY = "61a7d0ab4e67162b7a0c7c35915cd48e"
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
- self.client_key = self._config['fanarttv_key'].get()
+ self.client_key = self._config["fanarttv_key"].get()
@staticmethod
def add_default_config(config):
- config.add({
- 'fanarttv_key': None,
- })
- config['fanarttv_key'].redact = True
+ config.add(
+ {
+ "fanarttv_key": None,
+ }
+ )
+ config["fanarttv_key"].redact = True
def get(self, album, plugin, paths):
if not album.mb_releasegroupid:
@@ -559,126 +614,142 @@ def get(self, album, plugin, paths):
try:
response = self.request(
self.API_ALBUMS + album.mb_releasegroupid,
- headers={'api-key': self.PROJECT_KEY,
- 'client-key': self.client_key})
+ headers={
+ "api-key": self.PROJECT_KEY,
+ "client-key": self.client_key,
+ },
+ )
except requests.RequestException:
- self._log.debug('fanart.tv: error receiving response')
+ self._log.debug("fanart.tv: error receiving response")
return
try:
data = response.json()
except ValueError:
- self._log.debug('fanart.tv: error loading response: {}',
- response.text)
+ self._log.debug(
+ "fanart.tv: error loading response: {}", response.text
+ )
return
- if 'status' in data and data['status'] == 'error':
- if 'not found' in data['error message'].lower():
- self._log.debug('fanart.tv: no image found')
- elif 'api key' in data['error message'].lower():
- self._log.warning('fanart.tv: Invalid API key given, please '
- 'enter a valid one in your config file.')
+ if "status" in data and data["status"] == "error":
+ if "not found" in data["error message"].lower():
+ self._log.debug("fanart.tv: no image found")
+ elif "api key" in data["error message"].lower():
+ self._log.warning(
+ "fanart.tv: Invalid API key given, please "
+ "enter a valid one in your config file."
+ )
else:
- self._log.debug('fanart.tv: error on request: {}',
- data['error message'])
+ self._log.debug(
+ "fanart.tv: error on request: {}", data["error message"]
+ )
return
matches = []
# can there be more than one releasegroupid per response?
- for mbid, art in data.get('albums', {}).items():
+ for mbid, art in data.get("albums", {}).items():
# there might be more art referenced, e.g. cdart, and an albumcover
# might not be present, even if the request was successful
- if album.mb_releasegroupid == mbid and 'albumcover' in art:
- matches.extend(art['albumcover'])
+ if album.mb_releasegroupid == mbid and "albumcover" in art:
+ matches.extend(art["albumcover"])
# can this actually occur?
else:
- self._log.debug('fanart.tv: unexpected mb_releasegroupid in '
- 'response!')
+ self._log.debug(
+                "fanart.tv: unexpected mb_releasegroupid in response!"
+ )
- matches.sort(key=lambda x: int(x['likes']), reverse=True)
+ matches.sort(key=lambda x: int(x["likes"]), reverse=True)
for item in matches:
# fanart.tv has a strict size requirement for album art to be
# uploaded
- yield self._candidate(url=item['url'],
- match=Candidate.MATCH_EXACT,
- size=(1000, 1000))
+ yield self._candidate(
+ url=item["url"], match=Candidate.MATCH_EXACT, size=(1000, 1000)
+ )
class ITunesStore(RemoteArtSource):
NAME = "iTunes Store"
- API_URL = 'https://itunes.apple.com/search'
+ API_URL = "https://itunes.apple.com/search"
def get(self, album, plugin, paths):
- """Return art URL from iTunes Store given an album title.
- """
+ """Return art URL from iTunes Store given an album title."""
if not (album.albumartist and album.album):
return
payload = {
- 'term': album.albumartist + ' ' + album.album,
- 'entity': 'album',
- 'media': 'music',
- 'limit': 200
+ "term": album.albumartist + " " + album.album,
+ "entity": "album",
+ "media": "music",
+ "limit": 200,
}
try:
r = self.request(self.API_URL, params=payload)
r.raise_for_status()
except requests.RequestException as e:
- self._log.debug('iTunes search failed: {0}', e)
+ self._log.debug("iTunes search failed: {0}", e)
return
try:
- candidates = r.json()['results']
+ candidates = r.json()["results"]
except ValueError as e:
- self._log.debug('Could not decode json response: {0}', e)
+ self._log.debug("Could not decode json response: {0}", e)
return
except KeyError as e:
- self._log.debug('{} not found in json. Fields are {} ',
- e,
- list(r.json().keys()))
+ self._log.debug(
+ "{} not found in json. Fields are {} ", e, list(r.json().keys())
+ )
return
if not candidates:
- self._log.debug('iTunes search for {!r} got no results',
- payload['term'])
+ self._log.debug(
+ "iTunes search for {!r} got no results", payload["term"]
+ )
return
- if self._config['high_resolution']:
- image_suffix = '100000x100000-999'
+ if self._config["high_resolution"]:
+ image_suffix = "100000x100000-999"
else:
- image_suffix = '1200x1200bb'
+ image_suffix = "1200x1200bb"
for c in candidates:
try:
- if (c['artistName'] == album.albumartist
- and c['collectionName'] == album.album):
- art_url = c['artworkUrl100']
- art_url = art_url.replace('100x100bb',
- image_suffix)
- yield self._candidate(url=art_url,
- match=Candidate.MATCH_EXACT)
+ if (
+ c["artistName"] == album.albumartist
+ and c["collectionName"] == album.album
+ ):
+ art_url = c["artworkUrl100"]
+ art_url = art_url.replace("100x100bb", image_suffix)
+ yield self._candidate(
+ url=art_url, match=Candidate.MATCH_EXACT
+ )
except KeyError as e:
- self._log.debug('Malformed itunes candidate: {} not found in {}', # NOQA E501
- e,
- list(c.keys()))
+ self._log.debug(
+                    "Malformed itunes candidate: {} not found in {}",
+ e,
+ list(c.keys()),
+ )
try:
- fallback_art_url = candidates[0]['artworkUrl100']
- fallback_art_url = fallback_art_url.replace('100x100bb',
- image_suffix)
- yield self._candidate(url=fallback_art_url,
- match=Candidate.MATCH_FALLBACK)
+ fallback_art_url = candidates[0]["artworkUrl100"]
+ fallback_art_url = fallback_art_url.replace(
+ "100x100bb", image_suffix
+ )
+ yield self._candidate(
+ url=fallback_art_url, match=Candidate.MATCH_FALLBACK
+ )
except KeyError as e:
- self._log.debug('Malformed itunes candidate: {} not found in {}',
- e,
- list(c.keys()))
+ self._log.debug(
+ "Malformed itunes candidate: {} not found in {}",
+ e,
+ list(c.keys()),
+ )
class Wikipedia(RemoteArtSource):
NAME = "Wikipedia (queried through DBpedia)"
- DBPEDIA_URL = 'https://dbpedia.org/sparql'
- WIKIPEDIA_URL = 'https://en.wikipedia.org/w/api.php'
- SPARQL_QUERY = '''PREFIX rdf:
+ DBPEDIA_URL = "https://dbpedia.org/sparql"
+ WIKIPEDIA_URL = "https://en.wikipedia.org/w/api.php"
+ SPARQL_QUERY = """PREFIX rdf:
PREFIX dbpprop:
PREFIX owl:
PREFIX rdfs:
@@ -698,7 +769,7 @@ class Wikipedia(RemoteArtSource):
?subject dbpprop:cover ?coverFilename .
FILTER ( regex(?name, "{album}", "i") )
}}
- Limit 1'''
+ Limit 1"""
def get(self, album, plugin, paths):
if not (album.albumartist and album.album):
@@ -711,28 +782,31 @@ def get(self, album, plugin, paths):
dbpedia_response = self.request(
self.DBPEDIA_URL,
params={
- 'format': 'application/sparql-results+json',
- 'timeout': 2500,
- 'query': self.SPARQL_QUERY.format(
- artist=album.albumartist.title(), album=album.album)
+ "format": "application/sparql-results+json",
+ "timeout": 2500,
+ "query": self.SPARQL_QUERY.format(
+ artist=album.albumartist.title(), album=album.album
+ ),
},
- headers={'content-type': 'application/json'},
+ headers={"content-type": "application/json"},
)
except requests.RequestException:
- self._log.debug('dbpedia: error receiving response')
+ self._log.debug("dbpedia: error receiving response")
return
try:
data = dbpedia_response.json()
- results = data['results']['bindings']
+ results = data["results"]["bindings"]
if results:
- cover_filename = 'File:' + results[0]['coverFilename']['value']
- page_id = results[0]['pageId']['value']
+ cover_filename = "File:" + results[0]["coverFilename"]["value"]
+ page_id = results[0]["pageId"]["value"]
else:
- self._log.debug('wikipedia: album not found on dbpedia')
+ self._log.debug("wikipedia: album not found on dbpedia")
except (ValueError, KeyError, IndexError):
- self._log.debug('wikipedia: error scraping dbpedia response: {}',
- dbpedia_response.text)
+ self._log.debug(
+ "wikipedia: error scraping dbpedia response: {}",
+ dbpedia_response.text,
+ )
# Ensure we have a filename before attempting to query wikipedia
if not (cover_filename and page_id):
@@ -743,43 +817,44 @@ def get(self, album, plugin, paths):
# An additional Wikipedia call can help to find the real filename.
# This may be removed once the DBPedia issue is resolved, see:
# https://github.com/dbpedia/extraction-framework/issues/396
- if ' .' in cover_filename and \
- '.' not in cover_filename.split(' .')[-1]:
+ if " ." in cover_filename and "." not in cover_filename.split(" .")[-1]:
self._log.debug(
- 'wikipedia: dbpedia provided incomplete cover_filename'
+ "wikipedia: dbpedia provided incomplete cover_filename"
)
- lpart, rpart = cover_filename.rsplit(' .', 1)
+ lpart, rpart = cover_filename.rsplit(" .", 1)
# Query all the images in the page
try:
wikipedia_response = self.request(
self.WIKIPEDIA_URL,
params={
- 'format': 'json',
- 'action': 'query',
- 'continue': '',
- 'prop': 'images',
- 'pageids': page_id,
+ "format": "json",
+ "action": "query",
+ "continue": "",
+ "prop": "images",
+ "pageids": page_id,
},
- headers={'content-type': 'application/json'},
+ headers={"content-type": "application/json"},
)
except requests.RequestException:
- self._log.debug('wikipedia: error receiving response')
+ self._log.debug("wikipedia: error receiving response")
return
# Try to see if one of the images on the pages matches our
# incomplete cover_filename
try:
data = wikipedia_response.json()
- results = data['query']['pages'][page_id]['images']
+ results = data["query"]["pages"][page_id]["images"]
for result in results:
- if re.match(re.escape(lpart) + r'.*?\.' + re.escape(rpart),
- result['title']):
- cover_filename = result['title']
+ if re.match(
+ re.escape(lpart) + r".*?\." + re.escape(rpart),
+ result["title"],
+ ):
+ cover_filename = result["title"]
break
except (ValueError, KeyError):
self._log.debug(
- 'wikipedia: failed to retrieve a cover_filename'
+ "wikipedia: failed to retrieve a cover_filename"
)
return
@@ -788,28 +863,29 @@ def get(self, album, plugin, paths):
wikipedia_response = self.request(
self.WIKIPEDIA_URL,
params={
- 'format': 'json',
- 'action': 'query',
- 'continue': '',
- 'prop': 'imageinfo',
- 'iiprop': 'url',
- 'titles': cover_filename.encode('utf-8'),
+ "format": "json",
+ "action": "query",
+ "continue": "",
+ "prop": "imageinfo",
+ "iiprop": "url",
+ "titles": cover_filename.encode("utf-8"),
},
- headers={'content-type': 'application/json'},
+ headers={"content-type": "application/json"},
)
except requests.RequestException:
- self._log.debug('wikipedia: error receiving response')
+ self._log.debug("wikipedia: error receiving response")
return
try:
data = wikipedia_response.json()
- results = data['query']['pages']
+ results = data["query"]["pages"]
for _, result in results.items():
- image_url = result['imageinfo'][0]['url']
- yield self._candidate(url=image_url,
- match=Candidate.MATCH_EXACT)
+ image_url = result["imageinfo"][0]["url"]
+ yield self._candidate(
+ url=image_url, match=Candidate.MATCH_EXACT
+ )
except (ValueError, KeyError, IndexError):
- self._log.debug('wikipedia: error scraping imageinfo')
+ self._log.debug("wikipedia: error scraping imageinfo")
return
@@ -827,13 +903,12 @@ def filename_priority(filename, cover_names):
return [idx for (idx, x) in enumerate(cover_names) if x in filename]
def get(self, album, plugin, paths):
- """Look for album art files in the specified directories.
- """
+ """Look for album art files in the specified directories."""
if not paths:
return
cover_names = list(map(util.bytestring_path, plugin.cover_names))
- cover_names_str = b'|'.join(cover_names)
- cover_pat = br''.join([br"(\b|_)(", cover_names_str, br")(\b|_)"])
+ cover_names_str = b"|".join(cover_names)
+ cover_pat = rb"".join([rb"(\b|_)(", cover_names_str, rb")(\b|_)"])
for path in paths:
if not os.path.isdir(syspath(path)):
@@ -841,67 +916,80 @@ def get(self, album, plugin, paths):
# Find all files that look like images in the directory.
images = []
- ignore = config['ignore'].as_str_seq()
- ignore_hidden = config['ignore_hidden'].get(bool)
- for _, _, files in sorted_walk(path, ignore=ignore,
- ignore_hidden=ignore_hidden):
+ ignore = config["ignore"].as_str_seq()
+ ignore_hidden = config["ignore_hidden"].get(bool)
+ for _, _, files in sorted_walk(
+ path, ignore=ignore, ignore_hidden=ignore_hidden
+ ):
for fn in files:
fn = bytestring_path(fn)
for ext in IMAGE_EXTENSIONS:
- if fn.lower().endswith(b'.' + ext) and \
- os.path.isfile(syspath(os.path.join(path, fn))):
+ if fn.lower().endswith(b"." + ext) and os.path.isfile(
+ syspath(os.path.join(path, fn))
+ ):
images.append(fn)
# Look for "preferred" filenames.
- images = sorted(images,
- key=lambda x:
- self.filename_priority(x, cover_names))
+ images = sorted(
+ images, key=lambda x: self.filename_priority(x, cover_names)
+ )
remaining = []
for fn in images:
if re.search(cover_pat, os.path.splitext(fn)[0], re.I):
- self._log.debug('using well-named art file {0}',
- util.displayable_path(fn))
- yield self._candidate(path=os.path.join(path, fn),
- match=Candidate.MATCH_EXACT)
+ self._log.debug(
+ "using well-named art file {0}",
+ util.displayable_path(fn),
+ )
+ yield self._candidate(
+ path=os.path.join(path, fn), match=Candidate.MATCH_EXACT
+ )
else:
remaining.append(fn)
# Fall back to any image in the folder.
if remaining and not plugin.cautious:
- self._log.debug('using fallback art file {0}',
- util.displayable_path(remaining[0]))
- yield self._candidate(path=os.path.join(path, remaining[0]),
- match=Candidate.MATCH_FALLBACK)
+ self._log.debug(
+ "using fallback art file {0}",
+ util.displayable_path(remaining[0]),
+ )
+ yield self._candidate(
+ path=os.path.join(path, remaining[0]),
+ match=Candidate.MATCH_FALLBACK,
+ )
class LastFM(RemoteArtSource):
NAME = "Last.fm"
# Sizes in priority order.
- SIZES = OrderedDict([
- ('mega', (300, 300)),
- ('extralarge', (300, 300)),
- ('large', (174, 174)),
- ('medium', (64, 64)),
- ('small', (34, 34)),
- ])
-
- API_URL = 'https://ws.audioscrobbler.com/2.0'
+ SIZES = OrderedDict(
+ [
+ ("mega", (300, 300)),
+ ("extralarge", (300, 300)),
+ ("large", (174, 174)),
+ ("medium", (64, 64)),
+ ("small", (34, 34)),
+ ]
+ )
+
+ API_URL = "https://ws.audioscrobbler.com/2.0"
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
- self.key = self._config['lastfm_key'].get(),
+ self.key = (self._config["lastfm_key"].get(),)
@staticmethod
def add_default_config(config):
- config.add({
- 'lastfm_key': None,
- })
- config['lastfm_key'].redact = True
+ config.add(
+ {
+ "lastfm_key": None,
+ }
+ )
+ config["lastfm_key"].redact = True
@classmethod
def available(cls, log, config):
- has_key = bool(config['lastfm_key'].get())
+ has_key = bool(config["lastfm_key"].get())
if not has_key:
log.debug("lastfm: Disabling art source due to missing key")
return has_key
@@ -911,60 +999,72 @@ def get(self, album, plugin, paths):
return
try:
- response = self.request(self.API_URL, params={
- 'method': 'album.getinfo',
- 'api_key': self.key,
- 'mbid': album.mb_albumid,
- 'format': 'json',
- })
+ response = self.request(
+ self.API_URL,
+ params={
+ "method": "album.getinfo",
+ "api_key": self.key,
+ "mbid": album.mb_albumid,
+ "format": "json",
+ },
+ )
except requests.RequestException:
- self._log.debug('lastfm: error receiving response')
+ self._log.debug("lastfm: error receiving response")
return
try:
data = response.json()
- if 'error' in data:
- if data['error'] == 6:
- self._log.debug('lastfm: no results for {}',
- album.mb_albumid)
+ if "error" in data:
+ if data["error"] == 6:
+ self._log.debug(
+ "lastfm: no results for {}", album.mb_albumid
+ )
else:
self._log.error(
- 'lastfm: failed to get album info: {} ({})',
- data['message'], data['error'])
+ "lastfm: failed to get album info: {} ({})",
+ data["message"],
+ data["error"],
+ )
else:
- images = {image['size']: image['#text']
- for image in data['album']['image']}
+ images = {
+ image["size"]: image["#text"]
+ for image in data["album"]["image"]
+ }
# Provide candidates in order of size.
for size in self.SIZES.keys():
if size in images:
- yield self._candidate(url=images[size],
- size=self.SIZES[size])
+ yield self._candidate(
+ url=images[size], size=self.SIZES[size]
+ )
except ValueError:
- self._log.debug('lastfm: error loading response: {}'
- .format(response.text))
+ self._log.debug(
+ "lastfm: error loading response: {}".format(response.text)
+ )
return
class Spotify(RemoteArtSource):
NAME = "Spotify"
- SPOTIFY_ALBUM_URL = 'https://open.spotify.com/album/'
+ SPOTIFY_ALBUM_URL = "https://open.spotify.com/album/"
@classmethod
def available(cls, log, config):
if not HAS_BEAUTIFUL_SOUP:
- log.debug('To use Spotify as an album art source, '
- 'you must install the beautifulsoup4 module. See '
- 'the documentation for further details.')
+ log.debug(
+ "To use Spotify as an album art source, "
+ "you must install the beautifulsoup4 module. See "
+ "the documentation for further details."
+ )
return HAS_BEAUTIFUL_SOUP
def get(self, album, plugin, paths):
try:
url = self.SPOTIFY_ALBUM_URL + album.items().get().spotify_album_id
except AttributeError:
- self._log.debug('Fetchart: no Spotify album ID found')
+ self._log.debug("Fetchart: no Spotify album ID found")
return
try:
response = requests.get(url)
@@ -974,14 +1074,15 @@ def get(self, album, plugin, paths):
return
try:
html = response.text
- soup = BeautifulSoup(html, 'html.parser')
- image_url = soup.find('meta',
- attrs={'property': 'og:image'})['content']
- yield self._candidate(url=image_url,
- match=Candidate.MATCH_EXACT)
+ soup = BeautifulSoup(html, "html.parser")
+ image_url = soup.find("meta", attrs={"property": "og:image"})[
+ "content"
+ ]
+ yield self._candidate(url=image_url, match=Candidate.MATCH_EXACT)
except ValueError:
- self._log.debug('Spotify: error loading response: {}'
- .format(response.text))
+ self._log.debug(
+ "Spotify: error loading response: {}".format(response.text)
+ )
return
@@ -1001,14 +1102,14 @@ def get(self, album, plugin, paths):
image_url = album.cover_art_url
else:
image_url = album.items().get().cover_art_url
- self._log.debug(f'Cover art URL {image_url} found for {album}')
+ self._log.debug(f"Cover art URL {image_url} found for {album}")
except (AttributeError, TypeError):
- self._log.debug(f'Cover art URL not found for {album}')
+ self._log.debug(f"Cover art URL not found for {album}")
return
if image_url:
yield self._candidate(url=image_url, match=Candidate.MATCH_EXACT)
else:
- self._log.debug(f'Cover art URL not found for {album}')
+ self._log.debug(f"Cover art URL not found for {album}")
return
@@ -1017,21 +1118,31 @@ def get(self, album, plugin, paths):
# Note that SOURCES_ALL is redundant (and presently unused). However, we keep
# it around nn order not break plugins that "register" (a.k.a. monkey-patch)
# their own fetchart sources.
-SOURCES_ALL = ['filesystem', 'coverart', 'itunes', 'amazon', 'albumart',
- 'wikipedia', 'google', 'fanarttv', 'lastfm', 'spotify']
+SOURCES_ALL = [
+ "filesystem",
+ "coverart",
+ "itunes",
+ "amazon",
+ "albumart",
+ "wikipedia",
+ "google",
+ "fanarttv",
+ "lastfm",
+ "spotify",
+]
ART_SOURCES = {
- 'filesystem': FileSystem,
- 'coverart': CoverArtArchive,
- 'itunes': ITunesStore,
- 'albumart': AlbumArtOrg,
- 'amazon': Amazon,
- 'wikipedia': Wikipedia,
- 'google': GoogleImages,
- 'fanarttv': FanartTV,
- 'lastfm': LastFM,
- 'spotify': Spotify,
- 'cover_art_url': CoverArtUrl,
+ "filesystem": FileSystem,
+ "coverart": CoverArtArchive,
+ "itunes": ITunesStore,
+ "albumart": AlbumArtOrg,
+ "amazon": Amazon,
+ "wikipedia": Wikipedia,
+ "google": GoogleImages,
+ "fanarttv": FanartTV,
+ "lastfm": LastFM,
+ "spotify": Spotify,
+ "cover_art_url": CoverArtUrl,
}
SOURCE_NAMES = {v: k for k, v in ART_SOURCES.items()}
@@ -1049,104 +1160,127 @@ def __init__(self):
# fetching them and placing them in the filesystem.
self.art_candidates = {}
- self.config.add({
- 'auto': True,
- 'minwidth': 0,
- 'maxwidth': 0,
- 'quality': 0,
- 'max_filesize': 0,
- 'enforce_ratio': False,
- 'cautious': False,
- 'cover_names': ['cover', 'front', 'art', 'album', 'folder'],
- 'sources': ['filesystem', 'coverart', 'itunes', 'amazon',
- 'albumart', 'cover_art_url'],
- 'store_source': False,
- 'high_resolution': False,
- 'deinterlace': False,
- 'cover_format': None,
- })
+ self.config.add(
+ {
+ "auto": True,
+ "minwidth": 0,
+ "maxwidth": 0,
+ "quality": 0,
+ "max_filesize": 0,
+ "enforce_ratio": False,
+ "cautious": False,
+ "cover_names": ["cover", "front", "art", "album", "folder"],
+ "sources": [
+ "filesystem",
+ "coverart",
+ "itunes",
+ "amazon",
+ "albumart",
+ "cover_art_url",
+ ],
+ "store_source": False,
+ "high_resolution": False,
+ "deinterlace": False,
+ "cover_format": None,
+ }
+ )
for source in ART_SOURCES.values():
source.add_default_config(self.config)
- self.minwidth = self.config['minwidth'].get(int)
- self.maxwidth = self.config['maxwidth'].get(int)
- self.max_filesize = self.config['max_filesize'].get(int)
- self.quality = self.config['quality'].get(int)
+ self.minwidth = self.config["minwidth"].get(int)
+ self.maxwidth = self.config["maxwidth"].get(int)
+ self.max_filesize = self.config["max_filesize"].get(int)
+ self.quality = self.config["quality"].get(int)
# allow both pixel and percentage-based margin specifications
- self.enforce_ratio = self.config['enforce_ratio'].get(
- confuse.OneOf([bool,
- confuse.String(pattern=self.PAT_PX),
- confuse.String(pattern=self.PAT_PERCENT)]))
+ self.enforce_ratio = self.config["enforce_ratio"].get(
+ confuse.OneOf(
+ [
+ bool,
+ confuse.String(pattern=self.PAT_PX),
+ confuse.String(pattern=self.PAT_PERCENT),
+ ]
+ )
+ )
self.margin_px = None
self.margin_percent = None
- self.deinterlace = self.config['deinterlace'].get(bool)
+ self.deinterlace = self.config["deinterlace"].get(bool)
if type(self.enforce_ratio) is str:
- if self.enforce_ratio[-1] == '%':
+ if self.enforce_ratio[-1] == "%":
self.margin_percent = float(self.enforce_ratio[:-1]) / 100
- elif self.enforce_ratio[-2:] == 'px':
+ elif self.enforce_ratio[-2:] == "px":
self.margin_px = int(self.enforce_ratio[:-2])
else:
# shouldn't happen
raise confuse.ConfigValueError()
self.enforce_ratio = True
- cover_names = self.config['cover_names'].as_str_seq()
+ cover_names = self.config["cover_names"].as_str_seq()
self.cover_names = list(map(util.bytestring_path, cover_names))
- self.cautious = self.config['cautious'].get(bool)
- self.store_source = self.config['store_source'].get(bool)
+ self.cautious = self.config["cautious"].get(bool)
+ self.store_source = self.config["store_source"].get(bool)
- self.src_removed = (config['import']['delete'].get(bool) or
- config['import']['move'].get(bool))
+ self.src_removed = config["import"]["delete"].get(bool) or config[
+ "import"
+ ]["move"].get(bool)
- self.cover_format = self.config['cover_format'].get(
+ self.cover_format = self.config["cover_format"].get(
confuse.Optional(str)
)
- if self.config['auto']:
+ if self.config["auto"]:
# Enable two import hooks when fetching is enabled.
self.import_stages = [self.fetch_art]
- self.register_listener('import_task_files', self.assign_art)
-
- available_sources = [(s_name, c)
- for (s_name, s_cls) in ART_SOURCES.items()
- if s_cls.available(self._log, self.config)
- for c in s_cls.VALID_MATCHING_CRITERIA]
+ self.register_listener("import_task_files", self.assign_art)
+
+ available_sources = [
+ (s_name, c)
+ for (s_name, s_cls) in ART_SOURCES.items()
+ if s_cls.available(self._log, self.config)
+ for c in s_cls.VALID_MATCHING_CRITERIA
+ ]
sources = plugins.sanitize_pairs(
- self.config['sources'].as_pairs(default_value='*'),
- available_sources)
+ self.config["sources"].as_pairs(default_value="*"),
+ available_sources,
+ )
- if 'remote_priority' in self.config:
+ if "remote_priority" in self.config:
self._log.warning(
- 'The `fetch_art.remote_priority` configuration option has '
- 'been deprecated. Instead, place `filesystem` at the end of '
- 'your `sources` list.')
- if self.config['remote_priority'].get(bool):
+ "The `fetch_art.remote_priority` configuration option has "
+ "been deprecated. Instead, place `filesystem` at the end of "
+ "your `sources` list."
+ )
+ if self.config["remote_priority"].get(bool):
fs = []
others = []
for s, c in sources:
- if s == 'filesystem':
+ if s == "filesystem":
fs.append((s, c))
else:
others.append((s, c))
sources = others + fs
- self.sources = [ART_SOURCES[s](self._log, self.config, match_by=[c])
- for s, c in sources]
+ self.sources = [
+ ART_SOURCES[s](self._log, self.config, match_by=[c])
+ for s, c in sources
+ ]
# Asynchronous; after music is added to the library.
def fetch_art(self, session, task):
"""Find art for the album being imported."""
if task.is_album: # Only fetch art for full albums.
- if (task.album.artpath
- and os.path.isfile(syspath(task.album.artpath))):
+ if task.album.artpath and os.path.isfile(
+ syspath(task.album.artpath)
+ ):
# Album already has art (probably a re-import); skip it.
return
if task.choice_flag == importer.action.ASIS:
# For as-is imports, don't search Web sources for art.
local = True
- elif task.choice_flag in (importer.action.APPLY,
- importer.action.RETAG):
+ elif task.choice_flag in (
+ importer.action.APPLY,
+ importer.action.RETAG,
+ ):
# Search everywhere for art.
local = False
else:
@@ -1163,8 +1297,8 @@ def _set_art(self, album, candidate, delete=False):
if self.store_source:
# store the source of the chosen artwork in a flexible field
self._log.debug(
- "Storing art_source for {0.albumartist} - {0.album}",
- album)
+ "Storing art_source for {0.albumartist} - {0.album}", album
+ )
album.art_source = SOURCE_NAMES[type(candidate.source)]
album.store()
@@ -1181,21 +1315,29 @@ def assign_art(self, session, task):
# Manual album art fetching.
def commands(self):
- cmd = ui.Subcommand('fetchart', help='download album art')
+ cmd = ui.Subcommand("fetchart", help="download album art")
cmd.parser.add_option(
- '-f', '--force', dest='force',
- action='store_true', default=False,
- help='re-download art when already present'
+ "-f",
+ "--force",
+ dest="force",
+ action="store_true",
+ default=False,
+ help="re-download art when already present",
)
cmd.parser.add_option(
- '-q', '--quiet', dest='quiet',
- action='store_true', default=False,
- help='quiet mode: do not output albums that already have artwork'
+ "-q",
+ "--quiet",
+ dest="quiet",
+ action="store_true",
+ default=False,
+ help="quiet mode: do not output albums that already have artwork",
)
def func(lib, opts, args):
- self.batch_fetch_art(lib, lib.albums(ui.decargs(args)), opts.force,
- opts.quiet)
+ self.batch_fetch_art(
+ lib, lib.albums(ui.decargs(args)), opts.force, opts.quiet
+ )
+
cmd.func = func
return [cmd]
@@ -1214,7 +1356,7 @@ def art_for_album(self, album, paths, local_only=False):
for source in self.sources:
if source.IS_LOCAL or not local_only:
self._log.debug(
- 'trying source {0} for album {1.albumartist} - {1.album}',
+ "trying source {0} for album {1.albumartist} - {1.album}",
SOURCE_NAMES[type(source)],
album,
)
@@ -1225,8 +1367,10 @@ def art_for_album(self, album, paths, local_only=False):
if candidate.validate(self):
out = candidate
self._log.debug(
- 'using {0.LOC_STR} image {1}'.format(
- source, util.displayable_path(out.path)))
+ "using {0.LOC_STR} image {1}".format(
+ source, util.displayable_path(out.path)
+ )
+ )
break
# Remove temporary files for invalid candidates.
source.cleanup(candidate)
@@ -1243,12 +1387,16 @@ def batch_fetch_art(self, lib, albums, force, quiet):
fetchart CLI command.
"""
for album in albums:
- if (album.artpath and not force
- and os.path.isfile(syspath(album.artpath))):
+ if (
+ album.artpath
+ and not force
+ and os.path.isfile(syspath(album.artpath))
+ ):
if not quiet:
- message = ui.colorize('text_highlight_minor',
- 'has album art')
- self._log.info('{0}: {1}', album, message)
+ message = ui.colorize(
+ "text_highlight_minor", "has album art"
+ )
+ self._log.info("{0}: {1}", album, message)
else:
# In ordinary invocations, look for images on the
# filesystem. When forcing, however, always go to the Web
@@ -1258,7 +1406,7 @@ def batch_fetch_art(self, lib, albums, force, quiet):
candidate = self.art_for_album(album, local_paths)
if candidate:
self._set_art(album, candidate)
- message = ui.colorize('text_success', 'found album art')
+ message = ui.colorize("text_success", "found album art")
else:
- message = ui.colorize('text_error', 'no art found')
- self._log.info('{0}: {1}', album, message)
+ message = ui.colorize("text_error", "no art found")
+ self._log.info("{0}: {1}", album, message)
diff --git a/beetsplug/filefilter.py b/beetsplug/filefilter.py
index ec8fddb4fc..5618c1bd15 100644
--- a/beetsplug/filefilter.py
+++ b/beetsplug/filefilter.py
@@ -17,38 +17,40 @@
import re
+
from beets import config
-from beets.util import bytestring_path
-from beets.plugins import BeetsPlugin
from beets.importer import SingletonImportTask
+from beets.plugins import BeetsPlugin
+from beets.util import bytestring_path
class FileFilterPlugin(BeetsPlugin):
def __init__(self):
super().__init__()
- self.register_listener('import_task_created',
- self.import_task_created_event)
- self.config.add({
- 'path': '.*'
- })
+ self.register_listener(
+ "import_task_created", self.import_task_created_event
+ )
+ self.config.add({"path": ".*"})
- self.path_album_regex = \
- self.path_singleton_regex = \
- re.compile(bytestring_path(self.config['path'].get()))
+ self.path_album_regex = self.path_singleton_regex = re.compile(
+ bytestring_path(self.config["path"].get())
+ )
- if 'album_path' in self.config:
+ if "album_path" in self.config:
self.path_album_regex = re.compile(
- bytestring_path(self.config['album_path'].get()))
+ bytestring_path(self.config["album_path"].get())
+ )
- if 'singleton_path' in self.config:
+ if "singleton_path" in self.config:
self.path_singleton_regex = re.compile(
- bytestring_path(self.config['singleton_path'].get()))
+ bytestring_path(self.config["singleton_path"].get())
+ )
def import_task_created_event(self, session, task):
if task.items and len(task.items) > 0:
items_to_import = []
for item in task.items:
- if self.file_filter(item['path']):
+ if self.file_filter(item["path"]):
items_to_import.append(item)
if len(items_to_import) > 0:
task.items = items_to_import
@@ -58,7 +60,7 @@ def import_task_created_event(self, session, task):
return []
elif isinstance(task, SingletonImportTask):
- if not self.file_filter(task.item['path']):
+ if not self.file_filter(task.item["path"]):
return []
# If not filtered, return the original task unchanged.
@@ -68,10 +70,9 @@ def file_filter(self, full_path):
"""Checks if the configured regular expressions allow the import
of the file given in full_path.
"""
- import_config = dict(config['import'])
+ import_config = dict(config["import"])
full_path = bytestring_path(full_path)
- if 'singletons' not in import_config or not import_config[
- 'singletons']:
+ if "singletons" not in import_config or not import_config["singletons"]:
# Album
return self.path_album_regex.match(full_path) is not None
else:
diff --git a/beetsplug/fish.py b/beetsplug/fish.py
index cfb168d9a4..71ac857432 100644
--- a/beetsplug/fish.py
+++ b/beetsplug/fish.py
@@ -23,17 +23,19 @@
"""
-from beets.plugins import BeetsPlugin
+import os
+from operator import attrgetter
+
from beets import library, ui
+from beets.plugins import BeetsPlugin
from beets.ui import commands
-from operator import attrgetter
-import os
+
BL_NEED2 = """complete -c beet -n '__fish_beet_needs_command' {} {}\n"""
BL_USE3 = """complete -c beet -n '__fish_beet_using_command {}' {} {}\n"""
BL_SUBS = """complete -c beet -n '__fish_at_level {} ""' {} {}\n"""
BL_EXTRA3 = """complete -c beet -n '__fish_beet_use_extra {}' {} {}\n"""
-HEAD = '''
+HEAD = """
function __fish_beet_needs_command
set cmd (commandline -opc)
if test (count $cmd) -eq 1
@@ -62,31 +64,35 @@
end
return 1
end
-'''
+"""
class FishPlugin(BeetsPlugin):
-
def commands(self):
- cmd = ui.Subcommand('fish', help='generate Fish shell tab completions')
+ cmd = ui.Subcommand("fish", help="generate Fish shell tab completions")
cmd.func = self.run
- cmd.parser.add_option('-f', '--noFields', action='store_true',
- default=False,
- help='omit album/track field completions')
cmd.parser.add_option(
- '-e',
- '--extravalues',
- action='append',
- type='choice',
- choices=library.Item.all_keys() +
- library.Album.all_keys(),
- help='include specified field *values* in completions')
+ "-f",
+ "--noFields",
+ action="store_true",
+ default=False,
+ help="omit album/track field completions",
+ )
+ cmd.parser.add_option(
+ "-e",
+ "--extravalues",
+ action="append",
+ type="choice",
+ choices=library.Item.all_keys() + library.Album.all_keys(),
+ help="include specified field *values* in completions",
+ )
cmd.parser.add_option(
- '-o',
- '--output',
- default='~/.config/fish/completions/beet.fish',
- help='where to save the script. default: '
- '~/.config/fish/completions')
+ "-o",
+ "--output",
+ default="~/.config/fish/completions/beet.fish",
+ help="where to save the script. default: "
+ "~/.config/fish/completions",
+ )
return [cmd]
def run(self, lib, opts, args):
@@ -99,17 +105,16 @@ def run(self, lib, opts, args):
completion_file_path = os.path.expanduser(opts.output)
completion_dir = os.path.dirname(completion_file_path)
- if completion_dir != '':
+ if completion_dir != "":
os.makedirs(completion_dir, exist_ok=True)
nobasicfields = opts.noFields # Do not complete for album/track fields
extravalues = opts.extravalues # e.g., Also complete artists names
beetcmds = sorted(
- (commands.default_commands +
- commands.plugins.commands()),
- key=attrgetter('name'))
- fields = sorted(set(
- library.Album.all_keys() + library.Item.all_keys()))
+ (commands.default_commands + commands.plugins.commands()),
+ key=attrgetter("name"),
+ )
+ fields = sorted(set(library.Album.all_keys() + library.Item.all_keys()))
# Collect commands, their aliases, and their help text
cmd_names_help = []
for cmd in beetcmds:
@@ -120,19 +125,26 @@ def run(self, lib, opts, args):
# Concatenate the string
totstring = HEAD + "\n"
totstring += get_cmds_list([name[0] for name in cmd_names_help])
- totstring += '' if nobasicfields else get_standard_fields(fields)
- totstring += get_extravalues(lib, extravalues) if extravalues else ''
- totstring += "\n" + "# ====== {} =====".format(
- "setup basic beet completion") + "\n" * 2
+ totstring += "" if nobasicfields else get_standard_fields(fields)
+ totstring += get_extravalues(lib, extravalues) if extravalues else ""
+ totstring += (
+ "\n"
+ + "# ====== {} =====".format("setup basic beet completion")
+ + "\n" * 2
+ )
totstring += get_basic_beet_options()
- totstring += "\n" + "# ====== {} =====".format(
- "setup field completion for subcommands") + "\n"
- totstring += get_subcommands(
- cmd_names_help, nobasicfields, extravalues)
+ totstring += (
+ "\n"
+ + "# ====== {} =====".format(
+ "setup field completion for subcommands"
+ )
+ + "\n"
+ )
+ totstring += get_subcommands(cmd_names_help, nobasicfields, extravalues)
# Set up completion for all the command options
totstring += get_all_commands(beetcmds)
- with open(completion_file_path, 'w') as fish_file:
+ with open(completion_file_path, "w") as fish_file:
fish_file.write(totstring)
@@ -145,32 +157,31 @@ def _escape(name):
def get_cmds_list(cmds_names):
# Make a list of all Beets core & plugin commands
- substr = ''
- substr += (
- "set CMDS " + " ".join(cmds_names) + ("\n" * 2)
- )
+ substr = ""
+ substr += "set CMDS " + " ".join(cmds_names) + ("\n" * 2)
return substr
def get_standard_fields(fields):
# Make a list of album/track fields and append with ':'
fields = (field + ":" for field in fields)
- substr = ''
- substr += (
- "set FIELDS " + " ".join(fields) + ("\n" * 2)
- )
+ substr = ""
+ substr += "set FIELDS " + " ".join(fields) + ("\n" * 2)
return substr
def get_extravalues(lib, extravalues):
# Make a list of all values from an album/track field.
# 'beet ls albumartist: ' yields completions for ABBA, Beatles, etc.
- word = ''
+ word = ""
values_set = get_set_of_values_for_field(lib, extravalues)
for fld in extravalues:
- extraname = fld.upper() + 'S'
+ extraname = fld.upper() + "S"
word += (
- "set " + extraname + " " + " ".join(sorted(values_set[fld]))
+ "set "
+ + extraname
+ + " "
+ + " ".join(sorted(values_set[fld]))
+             + ("\n" * 2)
)
return word
@@ -189,21 +200,24 @@ def get_set_of_values_for_field(lib, fields):
def get_basic_beet_options():
word = (
- BL_NEED2.format("-l format-item",
- "-f -d 'print with custom format'") +
- BL_NEED2.format("-l format-album",
- "-f -d 'print with custom format'") +
- BL_NEED2.format("-s l -l library",
- "-f -r -d 'library database file to use'") +
- BL_NEED2.format("-s d -l directory",
- "-f -r -d 'destination music directory'") +
- BL_NEED2.format("-s v -l verbose",
- "-f -d 'print debugging information'") +
-
- BL_NEED2.format("-s c -l config",
- "-f -r -d 'path to configuration file'") +
- BL_NEED2.format("-s h -l help",
- "-f -d 'print this help message and exit'"))
+ BL_NEED2.format("-l format-item", "-f -d 'print with custom format'")
+ + BL_NEED2.format("-l format-album", "-f -d 'print with custom format'")
+ + BL_NEED2.format(
+ "-s l -l library", "-f -r -d 'library database file to use'"
+ )
+ + BL_NEED2.format(
+ "-s d -l directory", "-f -r -d 'destination music directory'"
+ )
+ + BL_NEED2.format(
+ "-s v -l verbose", "-f -d 'print debugging information'"
+ )
+ + BL_NEED2.format(
+ "-s c -l config", "-f -r -d 'path to configuration file'"
+ )
+ + BL_NEED2.format(
+ "-s h -l help", "-f -d 'print this help message and exit'"
+ )
+ )
return word
@@ -213,27 +227,35 @@ def get_subcommands(cmd_name_and_help, nobasicfields, extravalues):
for cmdname, cmdhelp in cmd_name_and_help:
cmdname = _escape(cmdname)
- word += "\n" + "# ------ {} -------".format(
- "fieldsetups for " + cmdname) + "\n"
word += (
- BL_NEED2.format(
- ("-a " + cmdname),
- ("-f " + "-d " + wrap(clean_whitespace(cmdhelp)))))
+ "\n"
+ + "# ------ {} -------".format("fieldsetups for " + cmdname)
+ + "\n"
+ )
+ word += BL_NEED2.format(
+ ("-a " + cmdname), ("-f " + "-d " + wrap(clean_whitespace(cmdhelp)))
+ )
if nobasicfields is False:
- word += (
- BL_USE3.format(
- cmdname,
- ("-a " + wrap("$FIELDS")),
- ("-f " + "-d " + wrap("fieldname"))))
+ word += BL_USE3.format(
+ cmdname,
+ ("-a " + wrap("$FIELDS")),
+ ("-f " + "-d " + wrap("fieldname")),
+ )
if extravalues:
for f in extravalues:
setvar = wrap("$" + f.upper() + "S")
- word += " ".join(BL_EXTRA3.format(
- (cmdname + " " + f + ":"),
- ('-f ' + '-A ' + '-a ' + setvar),
- ('-d ' + wrap(f))).split()) + "\n"
+ word += (
+ " ".join(
+ BL_EXTRA3.format(
+ (cmdname + " " + f + ":"),
+ ("-f " + "-A " + "-a " + setvar),
+ ("-d " + wrap(f)),
+ ).split()
+ )
+ + "\n"
+ )
return word
@@ -247,30 +269,59 @@ def get_all_commands(beetcmds):
name = _escape(name)
word += "\n"
- word += ("\n" * 2) + "# ====== {} =====".format(
- "completions for " + name) + "\n"
+ word += (
+ ("\n" * 2)
+ + "# ====== {} =====".format("completions for " + name)
+ + "\n"
+ )
for option in cmd.parser._get_all_options()[1:]:
- cmd_l = (" -l " + option._long_opts[0].replace('--', '')
- )if option._long_opts else ''
- cmd_s = (" -s " + option._short_opts[0].replace('-', '')
- ) if option._short_opts else ''
- cmd_need_arg = ' -r ' if option.nargs in [1] else ''
- cmd_helpstr = (" -d " + wrap(' '.join(option.help.split()))
- ) if option.help else ''
- cmd_arglist = (' -a ' + wrap(" ".join(option.choices))
- ) if option.choices else ''
-
- word += " ".join(BL_USE3.format(
+ cmd_l = (
+ (" -l " + option._long_opts[0].replace("--", ""))
+ if option._long_opts
+ else ""
+ )
+ cmd_s = (
+ (" -s " + option._short_opts[0].replace("-", ""))
+ if option._short_opts
+ else ""
+ )
+ cmd_need_arg = " -r " if option.nargs in [1] else ""
+ cmd_helpstr = (
+ (" -d " + wrap(" ".join(option.help.split())))
+ if option.help
+ else ""
+ )
+ cmd_arglist = (
+ (" -a " + wrap(" ".join(option.choices)))
+ if option.choices
+ else ""
+ )
+
+ word += (
+ " ".join(
+ BL_USE3.format(
+ name,
+ (
+ cmd_need_arg
+ + cmd_s
+ + cmd_l
+ + " -f "
+ + cmd_arglist
+ ),
+ cmd_helpstr,
+ ).split()
+ )
+ + "\n"
+ )
+
+ word = word + " ".join(
+ BL_USE3.format(
name,
- (cmd_need_arg + cmd_s + cmd_l + " -f " + cmd_arglist),
- cmd_helpstr).split()) + "\n"
-
- word = (word + " ".join(BL_USE3.format(
- name,
- ("-s " + "h " + "-l " + "help" + " -f "),
- ('-d ' + wrap("print help") + "\n")
- ).split()))
+ ("-s " + "h " + "-l " + "help" + " -f "),
+ ("-d " + wrap("print help") + "\n"),
+ ).split()
+ )
return word
@@ -281,7 +332,7 @@ def clean_whitespace(word):
def wrap(word):
# Need " or ' around strings but watch out if they're in the string
- sptoken = '\"'
+ sptoken = '"'
if ('"') in word and ("'") in word:
word.replace('"', sptoken)
return '"' + word + '"'
diff --git a/beetsplug/freedesktop.py b/beetsplug/freedesktop.py
index ba4d58793a..a9a25279cf 100644
--- a/beetsplug/freedesktop.py
+++ b/beetsplug/freedesktop.py
@@ -16,20 +16,25 @@
"""
-from beets.plugins import BeetsPlugin
from beets import ui
+from beets.plugins import BeetsPlugin
class FreedesktopPlugin(BeetsPlugin):
def commands(self):
deprecated = ui.Subcommand(
"freedesktop",
- help="Print a message to redirect to thumbnails --dolphin")
+ help="Print a message to redirect to thumbnails --dolphin",
+ )
deprecated.func = self.deprecation_message
return [deprecated]
def deprecation_message(self, lib, opts, args):
- ui.print_("This plugin is deprecated. Its functionality is "
- "superseded by the 'thumbnails' plugin")
- ui.print_("'thumbnails --dolphin' replaces freedesktop. See doc & "
- "changelog for more information")
+ ui.print_(
+ "This plugin is deprecated. Its functionality is "
+ "superseded by the 'thumbnails' plugin"
+ )
+ ui.print_(
+ "'thumbnails --dolphin' replaces freedesktop. See doc & "
+ "changelog for more information"
+ )
diff --git a/beetsplug/fromfilename.py b/beetsplug/fromfilename.py
index 6d7e3d0096..103e829016 100644
--- a/beetsplug/fromfilename.py
+++ b/beetsplug/fromfilename.py
@@ -16,34 +16,34 @@
filename.
"""
-from beets import plugins
-from beets.util import displayable_path
import os
import re
+from beets import plugins
+from beets.util import displayable_path
+
# Filename field extraction patterns.
PATTERNS = [
- # Useful patterns.
-    r'^(?P<artist>.+)[\-_](?P<title>.+)[\-_](?P<tag>.*)$',
- r'^(?P
\s*]*)>', '\n', html)
- return re.sub(r'
\s*
', '\n', html)
+    html = re.sub(r"</p>\s*<p(\s*[^>]*)>", "\n", html)
+    return re.sub(r"<br>\s*<br>", "\n", html)
def scrape_lyrics_from_html(html):
"""Scrape lyrics from a URL. If no lyrics can be found, return None
instead.
"""
+
def is_text_notcode(text):
if not text:
return False
length = len(text)
- return (length > 20 and
- text.count(' ') > length / 25 and
- (text.find('{') == -1 or text.find(';') == -1))
+ return (
+ length > 20
+ and text.count(" ") > length / 25
+ and (text.find("{") == -1 or text.find(";") == -1)
+ )
+
html = _scrape_strip_cruft(html)
html = _scrape_merge_paragraphs(html)
# extract all long text blocks that are not code
- soup = try_parse_html(html,
- parse_only=SoupStrainer(string=is_text_notcode))
+ soup = try_parse_html(html, parse_only=SoupStrainer(string=is_text_notcode))
if not soup:
return None
@@ -602,55 +628,53 @@ class Google(Backend):
def __init__(self, config, log):
super().__init__(config, log)
- self.api_key = config['google_API_key'].as_str()
- self.engine_id = config['google_engine_ID'].as_str()
+ self.api_key = config["google_API_key"].as_str()
+ self.engine_id = config["google_engine_ID"].as_str()
def is_lyrics(self, text, artist=None):
- """Determine whether the text seems to be valid lyrics.
- """
+ """Determine whether the text seems to be valid lyrics."""
if not text:
return False
bad_triggers_occ = []
- nb_lines = text.count('\n')
+ nb_lines = text.count("\n")
if nb_lines <= 1:
self._log.debug("Ignoring too short lyrics '{0}'", text)
return False
elif nb_lines < 5:
- bad_triggers_occ.append('too_short')
+ bad_triggers_occ.append("too_short")
else:
# Lyrics look legit, remove credits to avoid being penalized
# further down
text = remove_credits(text)
- bad_triggers = ['lyrics', 'copyright', 'property', 'links']
+ bad_triggers = ["lyrics", "copyright", "property", "links"]
if artist:
bad_triggers += [artist]
for item in bad_triggers:
- bad_triggers_occ += [item] * len(re.findall(r'\W%s\W' % item,
- text, re.I))
+ bad_triggers_occ += [item] * len(
+ re.findall(r"\W%s\W" % item, text, re.I)
+ )
if bad_triggers_occ:
- self._log.debug('Bad triggers detected: {0}', bad_triggers_occ)
+ self._log.debug("Bad triggers detected: {0}", bad_triggers_occ)
return len(bad_triggers_occ) < 2
def slugify(self, text):
- """Normalize a string and remove non-alphanumeric characters.
- """
- text = re.sub(r"[-'_\s]", '_', text)
- text = re.sub(r"_+", '_', text).strip('_')
+ """Normalize a string and remove non-alphanumeric characters."""
+ text = re.sub(r"[-'_\s]", "_", text)
+ text = re.sub(r"_+", "_", text).strip("_")
pat = r"([^,\(]*)\((.*?)\)" # Remove content within parentheses
- text = re.sub(pat, r'\g<1>', text).strip()
+ text = re.sub(pat, r"\g<1>", text).strip()
try:
- text = unicodedata.normalize('NFKD', text).encode('ascii',
- 'ignore')
- text = str(re.sub(r'[-\s]+', ' ', text.decode('utf-8')))
+ text = unicodedata.normalize("NFKD", text).encode("ascii", "ignore")
+ text = str(re.sub(r"[-\s]+", " ", text.decode("utf-8")))
except UnicodeDecodeError:
self._log.exception("Failing to normalize '{0}'", text)
return text
- BY_TRANS = ['by', 'par', 'de', 'von']
- LYRICS_TRANS = ['lyrics', 'paroles', 'letras', 'liedtexte']
+ BY_TRANS = ["by", "par", "de", "von"]
+ LYRICS_TRANS = ["lyrics", "paroles", "letras", "liedtexte"]
def is_page_candidate(self, url_link, url_title, title, artist):
"""Return True if the URL title makes it a good candidate to be a
@@ -658,8 +682,9 @@ def is_page_candidate(self, url_link, url_title, title, artist):
"""
title = self.slugify(title.lower())
artist = self.slugify(artist.lower())
- sitename = re.search("//([^/]+)/.*",
- self.slugify(url_link.lower())).group(1)
+ sitename = re.search(
+ "//([^/]+)/.*", self.slugify(url_link.lower())
+ ).group(1)
url_title = self.slugify(url_title.lower())
# Check if URL title contains song title (exact match)
@@ -668,42 +693,47 @@ def is_page_candidate(self, url_link, url_title, title, artist):
# or try extracting song title from URL title and check if
# they are close enough
- tokens = [by + '_' + artist for by in self.BY_TRANS] + \
- [artist, sitename, sitename.replace('www.', '')] + \
- self.LYRICS_TRANS
+ tokens = (
+ [by + "_" + artist for by in self.BY_TRANS]
+ + [artist, sitename, sitename.replace("www.", "")]
+ + self.LYRICS_TRANS
+ )
tokens = [re.escape(t) for t in tokens]
- song_title = re.sub('(%s)' % '|'.join(tokens), '', url_title)
+ song_title = re.sub("(%s)" % "|".join(tokens), "", url_title)
- song_title = song_title.strip('_|')
- typo_ratio = .9
+ song_title = song_title.strip("_|")
+ typo_ratio = 0.9
ratio = difflib.SequenceMatcher(None, song_title, title).ratio()
return ratio >= typo_ratio
def fetch(self, artist, title):
query = f"{artist} {title}"
- url = 'https://www.googleapis.com/customsearch/v1?key=%s&cx=%s&q=%s' \
- % (self.api_key, self.engine_id,
- urllib.parse.quote(query.encode('utf-8')))
+ url = "https://www.googleapis.com/customsearch/v1?key=%s&cx=%s&q=%s" % (
+ self.api_key,
+ self.engine_id,
+ urllib.parse.quote(query.encode("utf-8")),
+ )
data = self.fetch_url(url)
if not data:
- self._log.debug('google backend returned no data')
+ self._log.debug("google backend returned no data")
return None
try:
data = json.loads(data)
except ValueError as exc:
- self._log.debug('google backend returned malformed JSON: {}', exc)
- if 'error' in data:
- reason = data['error']['errors'][0]['reason']
- self._log.debug('google backend error: {0}', reason)
+ self._log.debug("google backend returned malformed JSON: {}", exc)
+ if "error" in data:
+ reason = data["error"]["errors"][0]["reason"]
+ self._log.debug("google backend error: {0}", reason)
return None
- if 'items' in data.keys():
- for item in data['items']:
- url_link = item['link']
- url_title = item.get('title', '')
- if not self.is_page_candidate(url_link, url_title,
- title, artist):
+ if "items" in data.keys():
+ for item in data["items"]:
+ url_link = item["link"]
+ url_title = item.get("title", "")
+ if not self.is_page_candidate(
+ url_link, url_title, title, artist
+ ):
continue
html = self.fetch_url(url_link)
if not html:
@@ -713,51 +743,51 @@ def fetch(self, artist, title):
continue
if self.is_lyrics(lyrics, artist):
- self._log.debug('got lyrics from {0}',
- item['displayLink'])
+ self._log.debug("got lyrics from {0}", item["displayLink"])
return lyrics
return None
class LyricsPlugin(plugins.BeetsPlugin):
- SOURCES = ['google', 'musixmatch', 'genius', 'tekstowo']
+ SOURCES = ["google", "musixmatch", "genius", "tekstowo"]
SOURCE_BACKENDS = {
- 'google': Google,
- 'musixmatch': MusiXmatch,
- 'genius': Genius,
- 'tekstowo': Tekstowo,
+ "google": Google,
+ "musixmatch": MusiXmatch,
+ "genius": Genius,
+ "tekstowo": Tekstowo,
}
def __init__(self):
super().__init__()
self.import_stages = [self.imported]
- self.config.add({
- 'auto': True,
- 'bing_client_secret': None,
- 'bing_lang_from': [],
- 'bing_lang_to': None,
- 'google_API_key': None,
- 'google_engine_ID': '009217259823014548361:lndtuqkycfu',
- 'genius_api_key':
- "Ryq93pUGm8bM6eUWwD_M3NOFFDAtp2yEE7W"
+ self.config.add(
+ {
+ "auto": True,
+ "bing_client_secret": None,
+ "bing_lang_from": [],
+ "bing_lang_to": None,
+ "google_API_key": None,
+ "google_engine_ID": "009217259823014548361:lndtuqkycfu",
+ "genius_api_key": "Ryq93pUGm8bM6eUWwD_M3NOFFDAtp2yEE7W"
"76V-uFL5jks5dNvcGCdarqFjDhP9c",
- 'fallback': None,
- 'force': False,
- 'local': False,
- # Musixmatch is disabled by default as they are currently blocking
- # requests with the beets user agent.
- 'sources': [s for s in self.SOURCES if s != "musixmatch"],
- 'dist_thresh': 0.1,
- })
- self.config['bing_client_secret'].redact = True
- self.config['google_API_key'].redact = True
- self.config['google_engine_ID'].redact = True
- self.config['genius_api_key'].redact = True
+ "fallback": None,
+ "force": False,
+ "local": False,
+ # Musixmatch is disabled by default as they are currently blocking
+ # requests with the beets user agent.
+ "sources": [s for s in self.SOURCES if s != "musixmatch"],
+ "dist_thresh": 0.1,
+ }
+ )
+ self.config["bing_client_secret"].redact = True
+ self.config["google_API_key"].redact = True
+ self.config["google_engine_ID"].redact = True
+ self.config["genius_api_key"].redact = True
# State information for the ReST writer.
# First, the current artist we're writing.
- self.artist = 'Unknown artist'
+ self.artist = "Unknown artist"
# The current album: False means no album yet.
self.album = False
# The current rest file content. None means the file is not
@@ -766,41 +796,49 @@ def __init__(self):
available_sources = list(self.SOURCES)
sources = plugins.sanitize_choices(
- self.config['sources'].as_str_seq(), available_sources)
+ self.config["sources"].as_str_seq(), available_sources
+ )
if not HAS_BEAUTIFUL_SOUP:
sources = self.sanitize_bs_sources(sources)
- if 'google' in sources:
- if not self.config['google_API_key'].get():
+ if "google" in sources:
+ if not self.config["google_API_key"].get():
# We log a *debug* message here because the default
# configuration includes `google`. This way, the source
# is silent by default but can be enabled just by
# setting an API key.
- self._log.debug('Disabling google source: '
- 'no API key configured.')
- sources.remove('google')
-
- self.config['bing_lang_from'] = [
- x.lower() for x in self.config['bing_lang_from'].as_str_seq()]
+ self._log.debug(
+ "Disabling google source: " "no API key configured."
+ )
+ sources.remove("google")
+
+ self.config["bing_lang_from"] = [
+ x.lower() for x in self.config["bing_lang_from"].as_str_seq()
+ ]
self.bing_auth_token = None
- if not HAS_LANGDETECT and self.config['bing_client_secret'].get():
- self._log.warning('To use bing translations, you need to '
- 'install the langdetect module. See the '
- 'documentation for further details.')
+ if not HAS_LANGDETECT and self.config["bing_client_secret"].get():
+ self._log.warning(
+ "To use bing translations, you need to "
+ "install the langdetect module. See the "
+ "documentation for further details."
+ )
- self.backends = [self.SOURCE_BACKENDS[source](self.config, self._log)
- for source in sources]
+ self.backends = [
+ self.SOURCE_BACKENDS[source](self.config, self._log)
+ for source in sources
+ ]
def sanitize_bs_sources(self, sources):
enabled_sources = []
for source in sources:
if self.SOURCE_BACKENDS[source].REQUIRES_BS:
- self._log.debug('To use the %s lyrics source, you must '
- 'install the beautifulsoup4 module. See '
- 'the documentation for further details.'
- % source)
+ self._log.debug(
+ "To use the %s lyrics source, you must "
+ "install the beautifulsoup4 module. See "
+ "the documentation for further details." % source
+ )
else:
enabled_sources.append(source)
@@ -808,43 +846,60 @@ def sanitize_bs_sources(self, sources):
def get_bing_access_token(self):
params = {
- 'client_id': 'beets',
- 'client_secret': self.config['bing_client_secret'],
- 'scope': "https://api.microsofttranslator.com",
- 'grant_type': 'client_credentials',
+ "client_id": "beets",
+ "client_secret": self.config["bing_client_secret"],
+ "scope": "https://api.microsofttranslator.com",
+ "grant_type": "client_credentials",
}
- oauth_url = 'https://datamarket.accesscontrol.windows.net/v2/OAuth2-13'
- oauth_token = json.loads(requests.post(
- oauth_url,
- data=urllib.parse.urlencode(params)).content)
- if 'access_token' in oauth_token:
- return "Bearer " + oauth_token['access_token']
+ oauth_url = "https://datamarket.accesscontrol.windows.net/v2/OAuth2-13"
+ oauth_token = json.loads(
+ requests.post(
+ oauth_url, data=urllib.parse.urlencode(params)
+ ).content
+ )
+ if "access_token" in oauth_token:
+ return "Bearer " + oauth_token["access_token"]
else:
- self._log.warning('Could not get Bing Translate API access token.'
- ' Check your "bing_client_secret" password')
+ self._log.warning(
+ "Could not get Bing Translate API access token."
+ ' Check your "bing_client_secret" password'
+ )
def commands(self):
- cmd = ui.Subcommand('lyrics', help='fetch song lyrics')
+ cmd = ui.Subcommand("lyrics", help="fetch song lyrics")
cmd.parser.add_option(
- '-p', '--print', dest='printlyr',
- action='store_true', default=False,
- help='print lyrics to console',
+ "-p",
+ "--print",
+ dest="printlyr",
+ action="store_true",
+ default=False,
+ help="print lyrics to console",
)
cmd.parser.add_option(
- '-r', '--write-rest', dest='writerest',
- action='store', default=None, metavar='dir',
- help='write lyrics to given directory as ReST files',
+ "-r",
+ "--write-rest",
+ dest="writerest",
+ action="store",
+ default=None,
+ metavar="dir",
+ help="write lyrics to given directory as ReST files",
)
cmd.parser.add_option(
- '-f', '--force', dest='force_refetch',
- action='store_true', default=False,
- help='always re-download lyrics',
+ "-f",
+ "--force",
+ dest="force_refetch",
+ action="store_true",
+ default=False,
+ help="always re-download lyrics",
)
cmd.parser.add_option(
- '-l', '--local', dest='local_only',
- action='store_true', default=False,
- help='do not fetch missing lyrics',
+ "-l",
+ "--local",
+ dest="local_only",
+ action="store_true",
+ default=False,
+ help="do not fetch missing lyrics",
)
def func(lib, opts, args):
@@ -855,10 +910,12 @@ def func(lib, opts, args):
self.writerest_indexes(opts.writerest)
items = lib.items(ui.decargs(args))
for item in items:
- if not opts.local_only and not self.config['local']:
+ if not opts.local_only and not self.config["local"]:
self.fetch_item_lyrics(
- lib, item, write,
- opts.force_refetch or self.config['force'],
+ lib,
+ item,
+ write,
+ opts.force_refetch or self.config["force"],
)
if item.lyrics:
if opts.printlyr:
@@ -868,14 +925,21 @@ def func(lib, opts, args):
if opts.writerest and items:
# flush last artist & write to ReST
self.writerest(opts.writerest)
- ui.print_('ReST files generated. to build, use one of:')
- ui.print_(' sphinx-build -b html %s _build/html'
- % opts.writerest)
- ui.print_(' sphinx-build -b epub %s _build/epub'
- % opts.writerest)
- ui.print_((' sphinx-build -b latex %s _build/latex '
- '&& make -C _build/latex all-pdf')
- % opts.writerest)
+ ui.print_("ReST files generated. to build, use one of:")
+ ui.print_(
+ " sphinx-build -b html %s _build/html" % opts.writerest
+ )
+ ui.print_(
+ " sphinx-build -b epub %s _build/epub" % opts.writerest
+ )
+ ui.print_(
+ (
+ " sphinx-build -b latex %s _build/latex "
+ "&& make -C _build/latex all-pdf"
+ )
+ % opts.writerest
+ )
+
cmd.func = func
return [cmd]
@@ -890,29 +954,30 @@ def appendrest(self, directory, item):
# Write current file and start a new one ~ item.albumartist
self.writerest(directory)
self.artist = item.albumartist.strip()
- self.rest = "%s\n%s\n\n.. contents::\n :local:\n\n" \
- % (self.artist,
- '=' * len(self.artist))
+ self.rest = "%s\n%s\n\n.. contents::\n :local:\n\n" % (
+ self.artist,
+ "=" * len(self.artist),
+ )
if self.album != item.album:
tmpalbum = self.album = item.album.strip()
- if self.album == '':
- tmpalbum = 'Unknown album'
- self.rest += "{}\n{}\n\n".format(tmpalbum, '-' * len(tmpalbum))
+ if self.album == "":
+ tmpalbum = "Unknown album"
+ self.rest += "{}\n{}\n\n".format(tmpalbum, "-" * len(tmpalbum))
title_str = ":index:`%s`" % item.title.strip()
- block = '| ' + item.lyrics.replace('\n', '\n| ')
- self.rest += "{}\n{}\n\n{}\n\n".format(title_str,
- '~' * len(title_str),
- block)
+ block = "| " + item.lyrics.replace("\n", "\n| ")
+ self.rest += "{}\n{}\n\n{}\n\n".format(
+ title_str, "~" * len(title_str), block
+ )
def writerest(self, directory):
- """Write self.rest to a ReST file
- """
+ """Write self.rest to a ReST file"""
if self.rest is not None and self.artist is not None:
- path = os.path.join(directory, 'artists',
- slug(self.artist) + '.rst')
- with open(path, 'wb') as output:
- output.write(self.rest.encode('utf-8'))
+ path = os.path.join(
+ directory, "artists", slug(self.artist) + ".rst"
+ )
+ with open(path, "wb") as output:
+ output.write(self.rest.encode("utf-8"))
def writerest_indexes(self, directory):
"""Write conf.py and index.rst files necessary for Sphinx
@@ -921,36 +986,36 @@ def writerest_indexes(self, directory):
to operate. We do not overwrite existing files so that
customizations are respected."""
try:
- os.makedirs(os.path.join(directory, 'artists'))
+ os.makedirs(os.path.join(directory, "artists"))
except OSError as e:
if e.errno == errno.EEXIST:
pass
else:
raise
- indexfile = os.path.join(directory, 'index.rst')
+ indexfile = os.path.join(directory, "index.rst")
if not os.path.exists(indexfile):
- with open(indexfile, 'w') as output:
+ with open(indexfile, "w") as output:
output.write(REST_INDEX_TEMPLATE)
- conffile = os.path.join(directory, 'conf.py')
+ conffile = os.path.join(directory, "conf.py")
if not os.path.exists(conffile):
- with open(conffile, 'w') as output:
+ with open(conffile, "w") as output:
output.write(REST_CONF_TEMPLATE)
def imported(self, session, task):
- """Import hook for fetching lyrics automatically.
- """
- if self.config['auto']:
+ """Import hook for fetching lyrics automatically."""
+ if self.config["auto"]:
for item in task.imported_items():
- self.fetch_item_lyrics(session.lib, item,
- False, self.config['force'])
+ self.fetch_item_lyrics(
+ session.lib, item, False, self.config["force"]
+ )
def fetch_item_lyrics(self, lib, item, write, force):
"""Fetch and store lyrics for a single item. If ``write``, then the
- lyrics will also be written to the file itself.
+ lyrics will also be written to the file itself.
"""
# Skip if the item already has lyrics.
if not force and item.lyrics:
- self._log.info('lyrics already present: {0}', item)
+ self._log.info("lyrics already present: {0}", item)
return
lyrics = None
@@ -962,18 +1027,19 @@ def fetch_item_lyrics(self, lib, item, write, force):
lyrics = "\n\n---\n\n".join([l for l in lyrics if l])
if lyrics:
- self._log.info('fetched lyrics: {0}', item)
- if HAS_LANGDETECT and self.config['bing_client_secret'].get():
+ self._log.info("fetched lyrics: {0}", item)
+ if HAS_LANGDETECT and self.config["bing_client_secret"].get():
lang_from = langdetect.detect(lyrics)
- if self.config['bing_lang_to'].get() != lang_from and (
- not self.config['bing_lang_from'] or (
- lang_from in self.config[
- 'bing_lang_from'].as_str_seq())):
+ if self.config["bing_lang_to"].get() != lang_from and (
+ not self.config["bing_lang_from"]
+ or (lang_from in self.config["bing_lang_from"].as_str_seq())
+ ):
lyrics = self.append_translation(
- lyrics, self.config['bing_lang_to'])
+ lyrics, self.config["bing_lang_to"]
+ )
else:
- self._log.info('lyrics not found: {0}', item)
- fallback = self.config['fallback'].get()
+ self._log.info("lyrics not found: {0}", item)
+ fallback = self.config["fallback"].get()
if fallback:
lyrics = fallback
else:
@@ -990,8 +1056,9 @@ def get_lyrics(self, artist, title):
for backend in self.backends:
lyrics = backend.fetch(artist, title)
if lyrics:
- self._log.debug('got lyrics from backend: {0}',
- backend.__class__.__name__)
+ self._log.debug(
+ "got lyrics from backend: {0}", backend.__class__.__name__
+ )
return _scrape_strip_cruft(lyrics, True)
def append_translation(self, text, to_lang):
@@ -1001,23 +1068,28 @@ def append_translation(self, text, to_lang):
self.bing_auth_token = self.get_bing_access_token()
if self.bing_auth_token:
# Extract unique lines to limit API request size per song
- text_lines = set(text.split('\n'))
- url = ('https://api.microsofttranslator.com/v2/Http.svc/'
- 'Translate?text=%s&to=%s' % ('|'.join(text_lines), to_lang))
- r = requests.get(url,
- headers={"Authorization ": self.bing_auth_token})
+ text_lines = set(text.split("\n"))
+ url = (
+ "https://api.microsofttranslator.com/v2/Http.svc/"
+ "Translate?text=%s&to=%s" % ("|".join(text_lines), to_lang)
+ )
+ r = requests.get(
+ url, headers={"Authorization ": self.bing_auth_token}
+ )
if r.status_code != 200:
- self._log.debug('translation API error {}: {}', r.status_code,
- r.text)
- if 'token has expired' in r.text:
+ self._log.debug(
+ "translation API error {}: {}", r.status_code, r.text
+ )
+ if "token has expired" in r.text:
self.bing_auth_token = None
return self.append_translation(text, to_lang)
return text
lines_translated = ElementTree.fromstring(
- r.text.encode('utf-8')).text
+ r.text.encode("utf-8")
+ ).text
# Use a translation mapping dict to build resulting lyrics
- translations = dict(zip(text_lines, lines_translated.split('|')))
- result = ''
- for line in text.split('\n'):
- result += '{} / {}\n'.format(line, translations[line])
+ translations = dict(zip(text_lines, lines_translated.split("|")))
+ result = ""
+ for line in text.split("\n"):
+ result += "{} / {}\n".format(line, translations[line])
return result
diff --git a/beetsplug/mbcollection.py b/beetsplug/mbcollection.py
index f4a0d161d4..1c010bf504 100644
--- a/beetsplug/mbcollection.py
+++ b/beetsplug/mbcollection.py
@@ -13,30 +13,29 @@
# included in all copies or substantial portions of the Software.
-from beets.plugins import BeetsPlugin
-from beets.ui import Subcommand
-from beets import ui
-from beets import config
+import re
+
import musicbrainzngs
-import re
+from beets import config, ui
+from beets.plugins import BeetsPlugin
+from beets.ui import Subcommand
SUBMISSION_CHUNK_SIZE = 200
FETCH_CHUNK_SIZE = 100
-UUID_REGEX = r'^[a-f0-9]{8}(-[a-f0-9]{4}){3}-[a-f0-9]{12}$'
+UUID_REGEX = r"^[a-f0-9]{8}(-[a-f0-9]{4}){3}-[a-f0-9]{12}$"
def mb_call(func, *args, **kwargs):
- """Call a MusicBrainz API function and catch exceptions.
- """
+ """Call a MusicBrainz API function and catch exceptions."""
try:
return func(*args, **kwargs)
except musicbrainzngs.AuthenticationError:
- raise ui.UserError('authentication with MusicBrainz failed')
+ raise ui.UserError("authentication with MusicBrainz failed")
except (musicbrainzngs.ResponseError, musicbrainzngs.NetworkError) as exc:
- raise ui.UserError(f'MusicBrainz API error: {exc}')
+ raise ui.UserError(f"MusicBrainz API error: {exc}")
except musicbrainzngs.UsageError:
- raise ui.UserError('MusicBrainz credentials missing')
+ raise ui.UserError("MusicBrainz credentials missing")
def submit_albums(collection_id, release_ids):
@@ -44,45 +43,45 @@ def submit_albums(collection_id, release_ids):
requests are made if there are many release IDs to submit.
"""
for i in range(0, len(release_ids), SUBMISSION_CHUNK_SIZE):
- chunk = release_ids[i:i + SUBMISSION_CHUNK_SIZE]
- mb_call(
- musicbrainzngs.add_releases_to_collection,
- collection_id, chunk
- )
+ chunk = release_ids[i : i + SUBMISSION_CHUNK_SIZE]
+ mb_call(musicbrainzngs.add_releases_to_collection, collection_id, chunk)
class MusicBrainzCollectionPlugin(BeetsPlugin):
def __init__(self):
super().__init__()
- config['musicbrainz']['pass'].redact = True
+ config["musicbrainz"]["pass"].redact = True
musicbrainzngs.auth(
- config['musicbrainz']['user'].as_str(),
- config['musicbrainz']['pass'].as_str(),
+ config["musicbrainz"]["user"].as_str(),
+ config["musicbrainz"]["pass"].as_str(),
)
- self.config.add({
- 'auto': False,
- 'collection': '',
- 'remove': False,
- })
- if self.config['auto']:
+ self.config.add(
+ {
+ "auto": False,
+ "collection": "",
+ "remove": False,
+ }
+ )
+ if self.config["auto"]:
self.import_stages = [self.imported]
def _get_collection(self):
collections = mb_call(musicbrainzngs.get_collections)
- if not collections['collection-list']:
- raise ui.UserError('no collections exist for user')
+ if not collections["collection-list"]:
+ raise ui.UserError("no collections exist for user")
# Get all collection IDs, avoiding event collections
- collection_ids = [x['id'] for x in collections['collection-list']]
+ collection_ids = [x["id"] for x in collections["collection-list"]]
if not collection_ids:
- raise ui.UserError('No collection found.')
+ raise ui.UserError("No collection found.")
# Check that the collection exists so we can present a nice error
- collection = self.config['collection'].as_str()
+ collection = self.config["collection"].as_str()
if collection:
if collection not in collection_ids:
- raise ui.UserError('invalid collection ID: {}'
- .format(collection))
+ raise ui.UserError(
+ "invalid collection ID: {}".format(collection)
+ )
return collection
# No specified collection. Just return the first collection ID
@@ -94,9 +93,9 @@ def _fetch(offset):
musicbrainzngs.get_releases_in_collection,
id,
limit=FETCH_CHUNK_SIZE,
- offset=offset
- )['collection']
- return [x['id'] for x in res['release-list']], res['release-count']
+ offset=offset,
+ )["collection"]
+ return [x["id"] for x in res["release-list"]], res["release-count"]
offset = 0
albums_in_collection, release_count = _fetch(offset)
@@ -107,13 +106,15 @@ def _fetch(offset):
return albums_in_collection
def commands(self):
- mbupdate = Subcommand('mbupdate',
- help='Update MusicBrainz collection')
- mbupdate.parser.add_option('-r', '--remove',
- action='store_true',
- default=None,
- dest='remove',
- help='Remove albums not in beets library')
+ mbupdate = Subcommand("mbupdate", help="Update MusicBrainz collection")
+ mbupdate.parser.add_option(
+ "-r",
+ "--remove",
+ action="store_true",
+ default=None,
+ dest="remove",
+ help="Remove albums not in beets library",
+ )
mbupdate.func = self.update_collection
return [mbupdate]
@@ -122,26 +123,25 @@ def remove_missing(self, collection_id, lib_albums):
albums_in_collection = self._get_albums_in_collection(collection_id)
remove_me = list(set(albums_in_collection) - lib_ids)
for i in range(0, len(remove_me), FETCH_CHUNK_SIZE):
- chunk = remove_me[i:i + FETCH_CHUNK_SIZE]
+ chunk = remove_me[i : i + FETCH_CHUNK_SIZE]
mb_call(
musicbrainzngs.remove_releases_from_collection,
- collection_id, chunk
+ collection_id,
+ chunk,
)
def update_collection(self, lib, opts, args):
self.config.set_args(opts)
- remove_missing = self.config['remove'].get(bool)
+ remove_missing = self.config["remove"].get(bool)
self.update_album_list(lib, lib.albums(), remove_missing)
def imported(self, session, task):
- """Add each imported album to the collection.
- """
+ """Add each imported album to the collection."""
if task.is_album:
self.update_album_list(session.lib, [task.album])
def update_album_list(self, lib, album_list, remove_missing=False):
- """Update the MusicBrainz collection from a list of Beets albums
- """
+ """Update the MusicBrainz collection from a list of Beets albums"""
collection_id = self._get_collection()
# Get a list of all the album IDs.
@@ -152,13 +152,11 @@ def update_album_list(self, lib, album_list, remove_missing=False):
if re.match(UUID_REGEX, aid):
album_ids.append(aid)
else:
- self._log.info('skipping invalid MBID: {0}', aid)
+ self._log.info("skipping invalid MBID: {0}", aid)
# Submit to MusicBrainz.
- self._log.info(
- 'Updating MusicBrainz collection {0}...', collection_id
- )
+ self._log.info("Updating MusicBrainz collection {0}...", collection_id)
submit_albums(collection_id, album_ids)
if remove_missing:
self.remove_missing(collection_id, lib.albums())
- self._log.info('...MusicBrainz collection updated.')
+ self._log.info("...MusicBrainz collection updated.")
diff --git a/beetsplug/mbsubmit.py b/beetsplug/mbsubmit.py
index 282349ff06..e4c0f372ea 100644
--- a/beetsplug/mbsubmit.py
+++ b/beetsplug/mbsubmit.py
@@ -26,7 +26,6 @@
from beets.autotag import Recommendation
from beets.plugins import BeetsPlugin
from beets.ui.commands import PromptChoice
-
from beetsplug.info import print_data
@@ -34,34 +33,40 @@ class MBSubmitPlugin(BeetsPlugin):
def __init__(self):
super().__init__()
- self.config.add({
- 'format': '$track. $title - $artist ($length)',
- 'threshold': 'medium',
- })
+ self.config.add(
+ {
+ "format": "$track. $title - $artist ($length)",
+ "threshold": "medium",
+ }
+ )
# Validate and store threshold.
- self.threshold = self.config['threshold'].as_choice({
- 'none': Recommendation.none,
- 'low': Recommendation.low,
- 'medium': Recommendation.medium,
- 'strong': Recommendation.strong
- })
-
- self.register_listener('before_choose_candidate',
- self.before_choose_candidate_event)
+ self.threshold = self.config["threshold"].as_choice(
+ {
+ "none": Recommendation.none,
+ "low": Recommendation.low,
+ "medium": Recommendation.medium,
+ "strong": Recommendation.strong,
+ }
+ )
+
+ self.register_listener(
+ "before_choose_candidate", self.before_choose_candidate_event
+ )
def before_choose_candidate_event(self, session, task):
if task.rec <= self.threshold:
- return [PromptChoice('p', 'Print tracks', self.print_tracks)]
+ return [PromptChoice("p", "Print tracks", self.print_tracks)]
def print_tracks(self, session, task):
for i in sorted(task.items, key=lambda i: i.track):
- print_data(None, i, self.config['format'].as_str())
+ print_data(None, i, self.config["format"].as_str())
def commands(self):
"""Add beet UI commands for mbsubmit."""
mbsubmit_cmd = ui.Subcommand(
- 'mbsubmit', help='Submit Tracks to MusicBrainz')
+ "mbsubmit", help="Submit Tracks to MusicBrainz"
+ )
def func(lib, opts, args):
items = lib.items(ui.decargs(args))
@@ -74,4 +79,4 @@ def func(lib, opts, args):
def _mbsubmit(self, items):
"""Print track information to be submitted to MusicBrainz."""
for i in sorted(items, key=lambda i: i.track):
- print_data(None, i, self.config['format'].as_str())
+ print_data(None, i, self.config["format"].as_str())
diff --git a/beetsplug/mbsync.py b/beetsplug/mbsync.py
index 2677883071..0e63a6f227 100644
--- a/beetsplug/mbsync.py
+++ b/beetsplug/mbsync.py
@@ -15,12 +15,12 @@
"""Update library's tags using MusicBrainz.
"""
-from beets.plugins import BeetsPlugin, apply_item_changes
-from beets import autotag, library, ui, util
-from beets.autotag import hooks
+import re
from collections import defaultdict
-import re
+from beets import autotag, library, ui, util
+from beets.autotag import hooks
+from beets.plugins import BeetsPlugin, apply_item_changes
MBID_REGEX = r"(\d|\w){8}-(\d|\w){4}-(\d|\w){4}-(\d|\w){4}-(\d|\w){12}"
@@ -30,28 +30,41 @@ def __init__(self):
super().__init__()
def commands(self):
- cmd = ui.Subcommand('mbsync',
- help='update metadata from musicbrainz')
+ cmd = ui.Subcommand("mbsync", help="update metadata from musicbrainz")
cmd.parser.add_option(
- '-p', '--pretend', action='store_true',
- help='show all changes but do nothing')
+ "-p",
+ "--pretend",
+ action="store_true",
+ help="show all changes but do nothing",
+ )
cmd.parser.add_option(
- '-m', '--move', action='store_true', dest='move',
- help="move files in the library directory")
+ "-m",
+ "--move",
+ action="store_true",
+ dest="move",
+ help="move files in the library directory",
+ )
cmd.parser.add_option(
- '-M', '--nomove', action='store_false', dest='move',
- help="don't move files in library")
+ "-M",
+ "--nomove",
+ action="store_false",
+ dest="move",
+ help="don't move files in library",
+ )
cmd.parser.add_option(
- '-W', '--nowrite', action='store_false',
- default=None, dest='write',
- help="don't write updated metadata to files")
+ "-W",
+ "--nowrite",
+ action="store_false",
+ default=None,
+ dest="write",
+ help="don't write updated metadata to files",
+ )
cmd.parser.add_format_option()
cmd.func = self.func
return [cmd]
def func(self, lib, opts, args):
- """Command handler for the mbsync function.
- """
+ """Command handler for the mbsync function."""
move = ui.should_move(opts.move)
pretend = opts.pretend
write = ui.should_write(opts.write)
@@ -64,25 +77,30 @@ def singletons(self, lib, query, move, pretend, write):
"""Retrieve and apply info from the autotagger for items matched by
query.
"""
- for item in lib.items(query + ['singleton:true']):
+ for item in lib.items(query + ["singleton:true"]):
item_formatted = format(item)
if not item.mb_trackid:
- self._log.info('Skipping singleton with no mb_trackid: {0}',
- item_formatted)
+ self._log.info(
+ "Skipping singleton with no mb_trackid: {0}", item_formatted
+ )
continue
# Do we have a valid MusicBrainz track ID?
if not re.match(MBID_REGEX, item.mb_trackid):
- self._log.info('Skipping singleton with invalid mb_trackid:' +
- ' {0}', item_formatted)
+ self._log.info(
+ "Skipping singleton with invalid mb_trackid:" + " {0}",
+ item_formatted,
+ )
continue
# Get the MusicBrainz recording info.
track_info = hooks.track_for_mbid(item.mb_trackid)
if not track_info:
- self._log.info('Recording ID not found: {0} for track {0}',
- item.mb_trackid,
- item_formatted)
+ self._log.info(
+ "Recording ID not found: {0} for track {0}",
+ item.mb_trackid,
+ item_formatted,
+ )
continue
# Apply.
@@ -98,24 +116,29 @@ def albums(self, lib, query, move, pretend, write):
for a in lib.albums(query):
album_formatted = format(a)
if not a.mb_albumid:
- self._log.info('Skipping album with no mb_albumid: {0}',
- album_formatted)
+ self._log.info(
+ "Skipping album with no mb_albumid: {0}", album_formatted
+ )
continue
items = list(a.items())
# Do we have a valid MusicBrainz album ID?
if not re.match(MBID_REGEX, a.mb_albumid):
- self._log.info('Skipping album with invalid mb_albumid: {0}',
- album_formatted)
+ self._log.info(
+ "Skipping album with invalid mb_albumid: {0}",
+ album_formatted,
+ )
continue
# Get the MusicBrainz album information.
album_info = hooks.album_for_mbid(a.mb_albumid)
if not album_info:
- self._log.info('Release ID {0} not found for album {1}',
- a.mb_albumid,
- album_formatted)
+ self._log.info(
+ "Release ID {0} not found for album {1}",
+ a.mb_albumid,
+ album_formatted,
+ )
continue
# Map release track and recording MBIDs to their information.
@@ -132,8 +155,10 @@ def albums(self, lib, query, move, pretend, write):
# work for albums that have missing or extra tracks.
mapping = {}
for item in items:
- if item.mb_releasetrackid and \
- item.mb_releasetrackid in releasetrack_index:
+ if (
+ item.mb_releasetrackid
+ and item.mb_releasetrackid in releasetrack_index
+ ):
mapping[item] = releasetrack_index[item.mb_releasetrackid]
else:
candidates = track_index[item.mb_trackid]
@@ -143,13 +168,15 @@ def albums(self, lib, query, move, pretend, write):
# If there are multiple copies of a recording, they are
# disambiguated using their disc and track number.
for c in candidates:
- if (c.medium_index == item.track and
- c.medium == item.disc):
+ if (
+ c.medium_index == item.track
+ and c.medium == item.disc
+ ):
mapping[item] = c
break
# Apply.
- self._log.debug('applying changes to {}', album_formatted)
+ self._log.debug("applying changes to {}", album_formatted)
with lib.transaction():
autotag.apply_metadata(album_info, mapping)
changed = False
@@ -174,5 +201,5 @@ def albums(self, lib, query, move, pretend, write):
# Move album art (and any inconsistent items).
if move and lib.directory in util.ancestry(items[0].path):
- self._log.debug('moving album {0}', album_formatted)
+ self._log.debug("moving album {0}", album_formatted)
a.move()
diff --git a/beetsplug/metasync/__init__.py b/beetsplug/metasync/__init__.py
index 361071fb5d..d17071b5bc 100644
--- a/beetsplug/metasync/__init__.py
+++ b/beetsplug/metasync/__init__.py
@@ -16,20 +16,20 @@
"""
-from abc import abstractmethod, ABCMeta
+from abc import ABCMeta, abstractmethod
from importlib import import_module
from confuse import ConfigValueError
+
from beets import ui
from beets.plugins import BeetsPlugin
-
-METASYNC_MODULE = 'beetsplug.metasync'
+METASYNC_MODULE = "beetsplug.metasync"
# Dictionary to map the MODULE and the CLASS NAME of meta sources
SOURCES = {
- 'amarok': 'Amarok',
- 'itunes': 'Itunes',
+ "amarok": "Amarok",
+ "itunes": "Itunes",
}
@@ -45,13 +45,13 @@ def sync_from_source(self, item):
def load_meta_sources():
- """ Returns a dictionary of all the MetaSources
+ """Returns a dictionary of all the MetaSources
E.g., {'itunes': Itunes} with isinstance(Itunes, MetaSource) true
"""
meta_sources = {}
for module_path, class_name in SOURCES.items():
- module = import_module(METASYNC_MODULE + '.' + module_path)
+ module = import_module(METASYNC_MODULE + "." + module_path)
meta_sources[class_name.lower()] = getattr(module, class_name)
return meta_sources
@@ -61,8 +61,7 @@ def load_meta_sources():
def load_item_types():
- """ Returns a dictionary containing the item_types of all the MetaSources
- """
+ """Returns a dictionary containing the item_types of all the MetaSources"""
item_types = {}
for meta_source in META_SOURCES.values():
item_types.update(meta_source.item_types)
@@ -70,42 +69,50 @@ def load_item_types():
class MetaSyncPlugin(BeetsPlugin):
-
item_types = load_item_types()
def __init__(self):
super().__init__()
def commands(self):
- cmd = ui.Subcommand('metasync',
- help='update metadata from music player libraries')
- cmd.parser.add_option('-p', '--pretend', action='store_true',
- help='show all changes but do nothing')
- cmd.parser.add_option('-s', '--source', default=[],
- action='append', dest='sources',
- help='comma-separated list of sources to sync')
+ cmd = ui.Subcommand(
+ "metasync", help="update metadata from music player libraries"
+ )
+ cmd.parser.add_option(
+ "-p",
+ "--pretend",
+ action="store_true",
+ help="show all changes but do nothing",
+ )
+ cmd.parser.add_option(
+ "-s",
+ "--source",
+ default=[],
+ action="append",
+ dest="sources",
+ help="comma-separated list of sources to sync",
+ )
cmd.parser.add_format_option()
cmd.func = self.func
return [cmd]
def func(self, lib, opts, args):
- """Command handler for the metasync function.
- """
+ """Command handler for the metasync function."""
pretend = opts.pretend
query = ui.decargs(args)
sources = []
for source in opts.sources:
- sources.extend(source.split(','))
+ sources.extend(source.split(","))
- sources = sources or self.config['source'].as_str_seq()
+ sources = sources or self.config["source"].as_str_seq()
meta_source_instances = {}
items = lib.items(query)
# Avoid needlessly instantiating meta sources (can be expensive)
if not items:
- self._log.info('No items found matching query')
+ self._log.info("No items found matching query")
return
# Instantiate the meta sources
@@ -113,18 +120,19 @@ def func(self, lib, opts, args):
try:
cls = META_SOURCES[player]
except KeyError:
- self._log.error('Unknown metadata source \'{}\''.format(
- player))
+ self._log.error("Unknown metadata source '{}'".format(player))
try:
meta_source_instances[player] = cls(self.config, self._log)
except (ImportError, ConfigValueError) as e:
- self._log.error('Failed to instantiate metadata source '
- '\'{}\': {}'.format(player, e))
+ self._log.error(
+ "Failed to instantiate metadata source "
+ "'{}': {}".format(player, e)
+ )
# Avoid needlessly iterating over items
if not meta_source_instances:
- self._log.error('No valid metadata sources found')
+ self._log.error("No valid metadata sources found")
return
# Sync the items with all of the meta sources
diff --git a/beetsplug/metasync/amarok.py b/beetsplug/metasync/amarok.py
index a49eecc305..195cd87875 100644
--- a/beetsplug/metasync/amarok.py
+++ b/beetsplug/metasync/amarok.py
@@ -16,35 +16,35 @@
"""
-from os.path import basename
from datetime import datetime
+from os.path import basename
from time import mktime
from xml.sax.saxutils import quoteattr
-from beets.util import displayable_path
from beets.dbcore import types
from beets.library import DateType
+from beets.util import displayable_path
from beetsplug.metasync import MetaSource
def import_dbus():
try:
- return __import__('dbus')
+ return __import__("dbus")
except ImportError:
return None
+
dbus = import_dbus()
class Amarok(MetaSource):
-
item_types = {
- 'amarok_rating': types.INTEGER,
- 'amarok_score': types.FLOAT,
- 'amarok_uid': types.STRING,
- 'amarok_playcount': types.INTEGER,
- 'amarok_firstplayed': DateType(),
- 'amarok_lastplayed': DateType(),
+ "amarok_rating": types.INTEGER,
+ "amarok_score": types.FLOAT,
+ "amarok_uid": types.STRING,
+ "amarok_playcount": types.INTEGER,
+ "amarok_firstplayed": DateType(),
+ "amarok_lastplayed": DateType(),
}
query_xml = ' \
@@ -57,10 +57,11 @@ def __init__(self, config, log):
super().__init__(config, log)
if not dbus:
- raise ImportError('failed to import dbus')
+ raise ImportError("failed to import dbus")
- self.collection = \
- dbus.SessionBus().get_object('org.kde.amarok', '/Collection')
+ self.collection = dbus.SessionBus().get_object(
+ "org.kde.amarok", "/Collection"
+ )
def sync_from_source(self, item):
path = displayable_path(item.path)
@@ -73,35 +74,36 @@ def sync_from_source(self, item):
self.query_xml % quoteattr(basename(path))
)
for result in results:
- if result['xesam:url'] != path:
+ if result["xesam:url"] != path:
continue
- item.amarok_rating = result['xesam:userRating']
- item.amarok_score = result['xesam:autoRating']
- item.amarok_playcount = result['xesam:useCount']
- item.amarok_uid = \
- result['xesam:id'].replace('amarok-sqltrackuid://', '')
+ item.amarok_rating = result["xesam:userRating"]
+ item.amarok_score = result["xesam:autoRating"]
+ item.amarok_playcount = result["xesam:useCount"]
+ item.amarok_uid = result["xesam:id"].replace(
+ "amarok-sqltrackuid://", ""
+ )
- if result['xesam:firstUsed'][0][0] != 0:
+ if result["xesam:firstUsed"][0][0] != 0:
# These dates are stored as timestamps in amarok's db, but
# exposed over dbus as fixed integers in the current timezone.
first_played = datetime(
- result['xesam:firstUsed'][0][0],
- result['xesam:firstUsed'][0][1],
- result['xesam:firstUsed'][0][2],
- result['xesam:firstUsed'][1][0],
- result['xesam:firstUsed'][1][1],
- result['xesam:firstUsed'][1][2]
+ result["xesam:firstUsed"][0][0],
+ result["xesam:firstUsed"][0][1],
+ result["xesam:firstUsed"][0][2],
+ result["xesam:firstUsed"][1][0],
+ result["xesam:firstUsed"][1][1],
+ result["xesam:firstUsed"][1][2],
)
- if result['xesam:lastUsed'][0][0] != 0:
+ if result["xesam:lastUsed"][0][0] != 0:
last_played = datetime(
- result['xesam:lastUsed'][0][0],
- result['xesam:lastUsed'][0][1],
- result['xesam:lastUsed'][0][2],
- result['xesam:lastUsed'][1][0],
- result['xesam:lastUsed'][1][1],
- result['xesam:lastUsed'][1][2]
+ result["xesam:lastUsed"][0][0],
+ result["xesam:lastUsed"][0][1],
+ result["xesam:lastUsed"][0][2],
+ result["xesam:lastUsed"][1][0],
+ result["xesam:lastUsed"][1][1],
+ result["xesam:lastUsed"][1][2],
)
else:
last_played = first_played
diff --git a/beetsplug/metasync/itunes.py b/beetsplug/metasync/itunes.py
index bc198b7bc5..15cbd7bb3d 100644
--- a/beetsplug/metasync/itunes.py
+++ b/beetsplug/metasync/itunes.py
@@ -16,27 +16,27 @@
"""
-from contextlib import contextmanager
import os
+import plistlib
import shutil
import tempfile
-import plistlib
-
-from urllib.parse import urlparse, unquote
+from contextlib import contextmanager
from time import mktime
+from urllib.parse import unquote, urlparse
+
+from confuse import ConfigValueError
from beets import util
from beets.dbcore import types
from beets.library import DateType
from beets.util import bytestring_path, syspath
-from confuse import ConfigValueError
from beetsplug.metasync import MetaSource
@contextmanager
def create_temporary_copy(path):
temp_dir = bytestring_path(tempfile.mkdtemp())
- temp_path = os.path.join(temp_dir, b'temp_itunes_lib')
+ temp_path = os.path.join(temp_dir, b"temp_itunes_lib")
shutil.copyfile(syspath(path), syspath(temp_path))
try:
yield temp_path
@@ -55,72 +55,74 @@ def _norm_itunes_path(path):
# which is unwanted in the case of Windows systems.
# E.g., '\\G:\\Music\\bar' needs to be stripped to 'G:\\Music\\bar'
- return util.bytestring_path(os.path.normpath(
- unquote(urlparse(path).path)).lstrip('\\')).lower()
+ return util.bytestring_path(
+ os.path.normpath(unquote(urlparse(path).path)).lstrip("\\")
+ ).lower()
class Itunes(MetaSource):
-
item_types = {
- 'itunes_rating': types.INTEGER, # 0..100 scale
- 'itunes_playcount': types.INTEGER,
- 'itunes_skipcount': types.INTEGER,
- 'itunes_lastplayed': DateType(),
- 'itunes_lastskipped': DateType(),
- 'itunes_dateadded': DateType(),
+ "itunes_rating": types.INTEGER, # 0..100 scale
+ "itunes_playcount": types.INTEGER,
+ "itunes_skipcount": types.INTEGER,
+ "itunes_lastplayed": DateType(),
+ "itunes_lastskipped": DateType(),
+ "itunes_dateadded": DateType(),
}
def __init__(self, config, log):
super().__init__(config, log)
- config.add({'itunes': {
- 'library': '~/Music/iTunes/iTunes Library.xml'
- }})
+ config.add({"itunes": {"library": "~/Music/iTunes/iTunes Library.xml"}})
# Load the iTunes library, which has to be the .xml one (not the .itl)
- library_path = config['itunes']['library'].as_filename()
+ library_path = config["itunes"]["library"].as_filename()
try:
- self._log.debug(
- f'loading iTunes library from {library_path}')
+ self._log.debug(f"loading iTunes library from {library_path}")
with create_temporary_copy(library_path) as library_copy:
- with open(library_copy, 'rb') as library_copy_f:
+ with open(library_copy, "rb") as library_copy_f:
raw_library = plistlib.load(library_copy_f)
except OSError as e:
- raise ConfigValueError('invalid iTunes library: ' + e.strerror)
+ raise ConfigValueError("invalid iTunes library: " + e.strerror)
except Exception:
# It's likely the user configured their '.itl' library (<> xml)
- if os.path.splitext(library_path)[1].lower() != '.xml':
- hint = ': please ensure that the configured path' \
- ' points to the .XML library'
+ if os.path.splitext(library_path)[1].lower() != ".xml":
+ hint = (
+ ": please ensure that the configured path"
+ " points to the .XML library"
+ )
else:
- hint = ''
- raise ConfigValueError('invalid iTunes library' + hint)
+ hint = ""
+ raise ConfigValueError("invalid iTunes library" + hint)
# Make the iTunes library queryable using the path
- self.collection = {_norm_itunes_path(track['Location']): track
- for track in raw_library['Tracks'].values()
- if 'Location' in track}
+ self.collection = {
+ _norm_itunes_path(track["Location"]): track
+ for track in raw_library["Tracks"].values()
+ if "Location" in track
+ }
def sync_from_source(self, item):
result = self.collection.get(util.bytestring_path(item.path).lower())
if not result:
- self._log.warning(f'no iTunes match found for {item}')
+ self._log.warning(f"no iTunes match found for {item}")
return
- item.itunes_rating = result.get('Rating')
- item.itunes_playcount = result.get('Play Count')
- item.itunes_skipcount = result.get('Skip Count')
+ item.itunes_rating = result.get("Rating")
+ item.itunes_playcount = result.get("Play Count")
+ item.itunes_skipcount = result.get("Skip Count")
- if result.get('Play Date UTC'):
+ if result.get("Play Date UTC"):
item.itunes_lastplayed = mktime(
- result.get('Play Date UTC').timetuple())
+ result.get("Play Date UTC").timetuple()
+ )
- if result.get('Skip Date'):
+ if result.get("Skip Date"):
item.itunes_lastskipped = mktime(
- result.get('Skip Date').timetuple())
+ result.get("Skip Date").timetuple()
+ )
- if result.get('Date Added'):
- item.itunes_dateadded = mktime(
- result.get('Date Added').timetuple())
+ if result.get("Date Added"):
+ item.itunes_dateadded = mktime(result.get("Date Added").timetuple())
diff --git a/beetsplug/missing.py b/beetsplug/missing.py
index 771978c1b7..2e37fde788 100644
--- a/beetsplug/missing.py
+++ b/beetsplug/missing.py
@@ -16,21 +16,21 @@
"""List missing tracks.
"""
-import musicbrainzngs
+from collections import defaultdict
+import musicbrainzngs
from musicbrainzngs.musicbrainz import MusicBrainzError
-from collections import defaultdict
+
+from beets import config
from beets.autotag import hooks
+from beets.dbcore import types
from beets.library import Item
from beets.plugins import BeetsPlugin
-from beets.ui import decargs, print_, Subcommand
-from beets import config
-from beets.dbcore import types
+from beets.ui import Subcommand, decargs, print_
def _missing_count(album):
- """Return number of missing items in `album`.
- """
+ """Return number of missing items in `album`."""
return (album.albumtotal or 0) - len(album.items())
@@ -45,80 +45,93 @@ def _item(track_info, album_info, album_id):
t = track_info
a = album_info
- return Item(**{
- 'album_id': album_id,
- 'album': a.album,
- 'albumartist': a.artist,
- 'albumartist_credit': a.artist_credit,
- 'albumartist_sort': a.artist_sort,
- 'albumdisambig': a.albumdisambig,
- 'albumstatus': a.albumstatus,
- 'albumtype': a.albumtype,
- 'artist': t.artist,
- 'artist_credit': t.artist_credit,
- 'artist_sort': t.artist_sort,
- 'asin': a.asin,
- 'catalognum': a.catalognum,
- 'comp': a.va,
- 'country': a.country,
- 'day': a.day,
- 'disc': t.medium,
- 'disctitle': t.disctitle,
- 'disctotal': a.mediums,
- 'label': a.label,
- 'language': a.language,
- 'length': t.length,
- 'mb_albumid': a.album_id,
- 'mb_artistid': t.artist_id,
- 'mb_releasegroupid': a.releasegroup_id,
- 'mb_trackid': t.track_id,
- 'media': t.media,
- 'month': a.month,
- 'script': a.script,
- 'title': t.title,
- 'track': t.index,
- 'tracktotal': len(a.tracks),
- 'year': a.year,
- })
+ return Item(
+ **{
+ "album_id": album_id,
+ "album": a.album,
+ "albumartist": a.artist,
+ "albumartist_credit": a.artist_credit,
+ "albumartist_sort": a.artist_sort,
+ "albumdisambig": a.albumdisambig,
+ "albumstatus": a.albumstatus,
+ "albumtype": a.albumtype,
+ "artist": t.artist,
+ "artist_credit": t.artist_credit,
+ "artist_sort": t.artist_sort,
+ "asin": a.asin,
+ "catalognum": a.catalognum,
+ "comp": a.va,
+ "country": a.country,
+ "day": a.day,
+ "disc": t.medium,
+ "disctitle": t.disctitle,
+ "disctotal": a.mediums,
+ "label": a.label,
+ "language": a.language,
+ "length": t.length,
+ "mb_albumid": a.album_id,
+ "mb_artistid": t.artist_id,
+ "mb_releasegroupid": a.releasegroup_id,
+ "mb_trackid": t.track_id,
+ "media": t.media,
+ "month": a.month,
+ "script": a.script,
+ "title": t.title,
+ "track": t.index,
+ "tracktotal": len(a.tracks),
+ "year": a.year,
+ }
+ )
class MissingPlugin(BeetsPlugin):
- """List missing tracks
- """
+ """List missing tracks"""
album_types = {
- 'missing': types.INTEGER,
+ "missing": types.INTEGER,
}
def __init__(self):
super().__init__()
- self.config.add({
- 'count': False,
- 'total': False,
- 'album': False,
- })
+ self.config.add(
+ {
+ "count": False,
+ "total": False,
+ "album": False,
+ }
+ )
- self.album_template_fields['missing'] = _missing_count
+ self.album_template_fields["missing"] = _missing_count
- self._command = Subcommand('missing',
- help=__doc__,
- aliases=['miss'])
+ self._command = Subcommand("missing", help=__doc__, aliases=["miss"])
self._command.parser.add_option(
- '-c', '--count', dest='count', action='store_true',
- help='count missing tracks per album')
+ "-c",
+ "--count",
+ dest="count",
+ action="store_true",
+ help="count missing tracks per album",
+ )
self._command.parser.add_option(
- '-t', '--total', dest='total', action='store_true',
- help='count total of missing tracks')
+ "-t",
+ "--total",
+ dest="total",
+ action="store_true",
+ help="count total of missing tracks",
+ )
self._command.parser.add_option(
- '-a', '--album', dest='album', action='store_true',
- help='show missing albums for artist instead of tracks')
+ "-a",
+ "--album",
+ dest="album",
+ action="store_true",
+ help="show missing albums for artist instead of tracks",
+ )
self._command.parser.add_format_option()
def commands(self):
def _miss(lib, opts, args):
self.config.set_args(opts)
- albms = self.config['album'].get()
+ albms = self.config["album"].get()
helper = self._missing_albums if albms else self._missing_tracks
helper(lib, decargs(args))
@@ -132,9 +145,9 @@ def _missing_tracks(self, lib, query):
"""
albums = lib.albums(query)
- count = self.config['count'].get()
- total = self.config['total'].get()
- fmt = config['format_album' if count else 'format_item'].get()
+ count = self.config["count"].get()
+ total = self.config["total"].get()
+ fmt = config["format_album" if count else "format_item"].get()
if total:
print(sum([_missing_count(a) for a in albums]))
@@ -142,7 +155,7 @@ def _missing_tracks(self, lib, query):
# Default format string for count mode.
if count:
- fmt += ': $missing'
+ fmt += ": $missing"
for album in albums:
if count:
@@ -157,13 +170,13 @@ def _missing_albums(self, lib, query):
"""Print a listing of albums missing from each artist in the library
matching query.
"""
- total = self.config['total'].get()
+ total = self.config["total"].get()
albums = lib.albums(query)
# build dict mapping artist to list of their albums in library
albums_by_artist = defaultdict(list)
for alb in albums:
- artist = (alb['albumartist'], alb['mb_albumartistid'])
+ artist = (alb["albumartist"], alb["mb_albumartistid"])
albums_by_artist[artist].append(alb)
total_missing = 0
@@ -171,20 +184,24 @@ def _missing_albums(self, lib, query):
# build dict mapping artist to list of all albums
for artist, albums in albums_by_artist.items():
if artist[1] is None or artist[1] == "":
- albs_no_mbid = ["'" + a['album'] + "'" for a in albums]
+ albs_no_mbid = ["'" + a["album"] + "'" for a in albums]
self._log.info(
"No musicbrainz ID for artist '{}' found in album(s) {}; "
- "skipping", artist[0], ", ".join(albs_no_mbid)
+ "skipping",
+ artist[0],
+ ", ".join(albs_no_mbid),
)
continue
try:
resp = musicbrainzngs.browse_release_groups(artist=artist[1])
- release_groups = resp['release-group-list']
+ release_groups = resp["release-group-list"]
except MusicBrainzError as err:
self._log.info(
"Couldn't fetch info for artist '{}' ({}) - '{}'",
- artist[0], artist[1], err
+ artist[0],
+ artist[1],
+ err,
)
continue
@@ -193,7 +210,7 @@ def _missing_albums(self, lib, query):
for rg in release_groups:
missing.append(rg)
for alb in albums:
- if alb['mb_releasegroupid'] == rg['id']:
+ if alb["mb_releasegroupid"] == rg["id"]:
missing.remove(rg)
present.append(rg)
break
@@ -202,7 +219,7 @@ def _missing_albums(self, lib, query):
if total:
continue
- missing_titles = {rg['title'] for rg in missing}
+ missing_titles = {rg["title"] for rg in missing}
for release_title in missing_titles:
print_("{} - {}".format(artist[0], release_title))
@@ -211,16 +228,18 @@ def _missing_albums(self, lib, query):
print(total_missing)
def _missing(self, album):
- """Query MusicBrainz to determine items missing from `album`.
- """
+ """Query MusicBrainz to determine items missing from `album`."""
item_mbids = [x.mb_trackid for x in album.items()]
if len(list(album.items())) < album.albumtotal:
# fetch missing items
# TODO: Implement caching that without breaking other stuff
album_info = hooks.album_for_mbid(album.mb_albumid)
- for track_info in getattr(album_info, 'tracks', []):
+ for track_info in getattr(album_info, "tracks", []):
if track_info.track_id not in item_mbids:
item = _item(track_info, album_info, album.id)
- self._log.debug('track {0} in album {1}',
- track_info.track_id, album_info.album_id)
+ self._log.debug(
+ "track {0} in album {1}",
+ track_info.track_id,
+ album_info.album_id,
+ )
yield item
diff --git a/beetsplug/mpdstats.py b/beetsplug/mpdstats.py
index 96291cf4c7..6d4c269d14 100644
--- a/beetsplug/mpdstats.py
+++ b/beetsplug/mpdstats.py
@@ -13,16 +13,14 @@
# included in all copies or substantial portions of the Software.
-import mpd
-import time
import os
+import time
-from beets import ui
-from beets import config
-from beets import plugins
-from beets import library
-from beets.util import displayable_path
+import mpd
+
+from beets import config, library, plugins, ui
from beets.dbcore import types
+from beets.util import displayable_path
# If we lose the connection, how many times do we want to retry and how
# much time should we wait between retries?
@@ -30,60 +28,55 @@
RETRY_INTERVAL = 5
-mpd_config = config['mpd']
+mpd_config = config["mpd"]
def is_url(path):
- """Try to determine if the path is an URL.
- """
+ """Try to determine if the path is an URL."""
if isinstance(path, bytes): # if it's bytes, then it's a path
return False
- return path.split('://', 1)[0] in ['http', 'https']
+ return path.split("://", 1)[0] in ["http", "https"]
class MPDClientWrapper:
def __init__(self, log):
self._log = log
- self.music_directory = mpd_config['music_directory'].as_str()
- self.strip_path = mpd_config['strip_path'].as_str()
+ self.music_directory = mpd_config["music_directory"].as_str()
+ self.strip_path = mpd_config["strip_path"].as_str()
# Ensure strip_path end with '/'
- if not self.strip_path.endswith('/'):
- self.strip_path += '/'
+ if not self.strip_path.endswith("/"):
+ self.strip_path += "/"
- self._log.debug('music_directory: {0}', self.music_directory)
- self._log.debug('strip_path: {0}', self.strip_path)
+ self._log.debug("music_directory: {0}", self.music_directory)
+ self._log.debug("strip_path: {0}", self.strip_path)
self.client = mpd.MPDClient()
def connect(self):
- """Connect to the MPD.
- """
- host = mpd_config['host'].as_str()
- port = mpd_config['port'].get(int)
+ """Connect to the MPD."""
+ host = mpd_config["host"].as_str()
+ port = mpd_config["port"].get(int)
- if host[0] in ['/', '~']:
+ if host[0] in ["/", "~"]:
host = os.path.expanduser(host)
- self._log.info('connecting to {0}:{1}', host, port)
+ self._log.info("connecting to {0}:{1}", host, port)
try:
self.client.connect(host, port)
except OSError as e:
- raise ui.UserError(f'could not connect to MPD: {e}')
+ raise ui.UserError(f"could not connect to MPD: {e}")
- password = mpd_config['password'].as_str()
+ password = mpd_config["password"].as_str()
if password:
try:
self.client.password(password)
except mpd.CommandError as e:
- raise ui.UserError(
- f'could not authenticate to MPD: {e}'
- )
+ raise ui.UserError(f"could not authenticate to MPD: {e}")
def disconnect(self):
- """Disconnect from the MPD.
- """
+ """Disconnect from the MPD."""
self.client.close()
self.client.disconnect()
@@ -94,11 +87,11 @@ def get(self, command, retries=RETRIES):
try:
return getattr(self.client, command)()
except (OSError, mpd.ConnectionError) as err:
- self._log.error('{0}', err)
+ self._log.error("{0}", err)
if retries <= 0:
# if we exited without breaking, we couldn't reconnect in time :(
- raise ui.UserError('communication with MPD server failed')
+ raise ui.UserError("communication with MPD server failed")
time.sleep(RETRY_INTERVAL)
@@ -119,28 +112,27 @@ def currentsong(self):
`strip_path` defaults to ''.
"""
result = None
- entry = self.get('currentsong')
- if 'file' in entry:
- if not is_url(entry['file']):
- file = entry['file']
+ entry = self.get("currentsong")
+ if "file" in entry:
+ if not is_url(entry["file"]):
+ file = entry["file"]
if file.startswith(self.strip_path):
- file = file[len(self.strip_path):]
+ file = file[len(self.strip_path) :]
result = os.path.join(self.music_directory, file)
else:
- result = entry['file']
- self._log.debug('returning: {0}', result)
- return result, entry.get('id')
+ result = entry["file"]
+ self._log.debug("returning: {0}", result)
+ return result, entry.get("id")
def status(self):
- """Return the current status of the MPD.
- """
- return self.get('status')
+ """Return the current status of the MPD."""
+ return self.get("status")
def events(self):
"""Return list of events. This may block a long time while waiting for
an answer from MPD.
"""
- return self.get('idle')
+ return self.get("idle")
class MPDStats:
@@ -148,8 +140,8 @@ def __init__(self, lib, log):
self.lib = lib
self._log = log
- self.do_rating = mpd_config['rating'].get(bool)
- self.rating_mix = mpd_config['rating_mix'].get(float)
+ self.do_rating = mpd_config["rating"].get(bool)
+ self.rating_mix = mpd_config["rating_mix"].get(float)
self.time_threshold = 10.0 # TODO: maybe add config option?
self.now_playing = None
@@ -160,22 +152,20 @@ def rating(self, play_count, skip_count, rating, skipped):
old rating and the fact if it was skipped or not.
"""
if skipped:
- rolling = (rating - rating / 2.0)
+ rolling = rating - rating / 2.0
else:
- rolling = (rating + (1.0 - rating) / 2.0)
+ rolling = rating + (1.0 - rating) / 2.0
stable = (play_count + 1.0) / (play_count + skip_count + 2.0)
- return (self.rating_mix * stable +
- (1.0 - self.rating_mix) * rolling)
+ return self.rating_mix * stable + (1.0 - self.rating_mix) * rolling
def get_item(self, path):
- """Return the beets item related to path.
- """
- query = library.PathQuery('path', path)
+ """Return the beets item related to path."""
+ query = library.PathQuery("path", path)
item = self.lib.items(query).get()
if item:
return item
else:
- self._log.info('item not found: {0}', displayable_path(path))
+ self._log.info("item not found: {0}", displayable_path(path))
def update_item(self, item, attribute, value=None, increment=None):
"""Update the beets item. Set attribute to value or increment the value
@@ -193,10 +183,12 @@ def update_item(self, item, attribute, value=None, increment=None):
item[attribute] = value
item.store()
- self._log.debug('updated: {0} = {1} [{2}]',
- attribute,
- item[attribute],
- displayable_path(item.path))
+ self._log.debug(
+ "updated: {0} = {1} [{2}]",
+ attribute,
+ item[attribute],
+ displayable_path(item.path),
+ )
def update_rating(self, item, skipped):
"""Update the rating for a beets item. The `item` can either be a
@@ -207,12 +199,13 @@ def update_rating(self, item, skipped):
item.load()
rating = self.rating(
- int(item.get('play_count', 0)),
- int(item.get('skip_count', 0)),
- float(item.get('rating', 0.5)),
- skipped)
+ int(item.get("play_count", 0)),
+ int(item.get("skip_count", 0)),
+ float(item.get("rating", 0.5)),
+ skipped,
+ )
- self.update_item(item, 'rating', rating)
+ self.update_item(item, "rating", rating)
def handle_song_change(self, song):
"""Determine if a song was skipped or not and update its attributes.
@@ -222,7 +215,7 @@ def handle_song_change(self, song):
Returns whether the change was manual (skipped previous song or not)
"""
- diff = abs(song['remaining'] - (time.time() - song['started']))
+ diff = abs(song["remaining"] - (time.time() - song["started"]))
skipped = diff >= self.time_threshold
@@ -232,89 +225,89 @@ def handle_song_change(self, song):
self.handle_played(song)
if self.do_rating:
- self.update_rating(song['beets_item'], skipped)
+ self.update_rating(song["beets_item"], skipped)
return skipped
def handle_played(self, song):
- """Updates the play count of a song.
- """
- self.update_item(song['beets_item'], 'play_count', increment=1)
- self._log.info('played {0}', displayable_path(song['path']))
+ """Updates the play count of a song."""
+ self.update_item(song["beets_item"], "play_count", increment=1)
+ self._log.info("played {0}", displayable_path(song["path"]))
def handle_skipped(self, song):
- """Updates the skip count of a song.
- """
- self.update_item(song['beets_item'], 'skip_count', increment=1)
- self._log.info('skipped {0}', displayable_path(song['path']))
+ """Updates the skip count of a song."""
+ self.update_item(song["beets_item"], "skip_count", increment=1)
+ self._log.info("skipped {0}", displayable_path(song["path"]))
def on_stop(self, status):
- self._log.info('stop')
+ self._log.info("stop")
# if the current song stays the same it means that we stopped on the
# current track and should not record a skip.
- if self.now_playing and self.now_playing['id'] != status.get('songid'):
+ if self.now_playing and self.now_playing["id"] != status.get("songid"):
self.handle_song_change(self.now_playing)
self.now_playing = None
def on_pause(self, status):
- self._log.info('pause')
+ self._log.info("pause")
self.now_playing = None
def on_play(self, status):
-
path, songid = self.mpd.currentsong()
if not path:
return
- played, duration = map(int, status['time'].split(':', 1))
+ played, duration = map(int, status["time"].split(":", 1))
remaining = duration - played
if self.now_playing:
- if self.now_playing['path'] != path:
+ if self.now_playing["path"] != path:
self.handle_song_change(self.now_playing)
else:
# In case we got mpd play event with same song playing
# multiple times,
# assume low diff means redundant second play event
# after natural song start.
- diff = abs(time.time() - self.now_playing['started'])
+ diff = abs(time.time() - self.now_playing["started"])
if diff <= self.time_threshold:
return
- if self.now_playing['path'] == path and played == 0:
+ if self.now_playing["path"] == path and played == 0:
self.handle_song_change(self.now_playing)
if is_url(path):
- self._log.info('playing stream {0}', displayable_path(path))
+ self._log.info("playing stream {0}", displayable_path(path))
self.now_playing = None
return
- self._log.info('playing {0}', displayable_path(path))
+ self._log.info("playing {0}", displayable_path(path))
self.now_playing = {
- 'started': time.time(),
- 'remaining': remaining,
- 'path': path,
- 'id': songid,
- 'beets_item': self.get_item(path),
+ "started": time.time(),
+ "remaining": remaining,
+ "path": path,
+ "id": songid,
+ "beets_item": self.get_item(path),
}
- self.update_item(self.now_playing['beets_item'],
- 'last_played', value=int(time.time()))
+ self.update_item(
+ self.now_playing["beets_item"],
+ "last_played",
+ value=int(time.time()),
+ )
def run(self):
self.mpd.connect()
- events = ['player']
+ events = ["player"]
while True:
- if 'player' in events:
+ if "player" in events:
status = self.mpd.status()
- handler = getattr(self, 'on_' + status['state'], None)
+ handler = getattr(self, "on_" + status["state"], None)
if handler:
handler(status)
@@ -325,51 +318,61 @@ def run(self):
class MPDStatsPlugin(plugins.BeetsPlugin):
-
item_types = {
- 'play_count': types.INTEGER,
- 'skip_count': types.INTEGER,
- 'last_played': library.DateType(),
- 'rating': types.FLOAT,
+ "play_count": types.INTEGER,
+ "skip_count": types.INTEGER,
+ "last_played": library.DateType(),
+ "rating": types.FLOAT,
}
def __init__(self):
super().__init__()
- mpd_config.add({
- 'music_directory': config['directory'].as_filename(),
- 'strip_path': '',
- 'rating': True,
- 'rating_mix': 0.75,
- 'host': os.environ.get('MPD_HOST', 'localhost'),
- 'port': int(os.environ.get('MPD_PORT', 6600)),
- 'password': '',
- })
- mpd_config['password'].redact = True
+ mpd_config.add(
+ {
+ "music_directory": config["directory"].as_filename(),
+ "strip_path": "",
+ "rating": True,
+ "rating_mix": 0.75,
+ "host": os.environ.get("MPD_HOST", "localhost"),
+ "port": int(os.environ.get("MPD_PORT", 6600)),
+ "password": "",
+ }
+ )
+ mpd_config["password"].redact = True
def commands(self):
cmd = ui.Subcommand(
- 'mpdstats',
- help='run a MPD client to gather play statistics')
+ "mpdstats", help="run a MPD client to gather play statistics"
+ )
cmd.parser.add_option(
- '--host', dest='host', type='string',
- help='set the hostname of the server to connect to')
+ "--host",
+ dest="host",
+ type="string",
+ help="set the hostname of the server to connect to",
+ )
cmd.parser.add_option(
- '--port', dest='port', type='int',
- help='set the port of the MPD server to connect to')
+ "--port",
+ dest="port",
+ type="int",
+ help="set the port of the MPD server to connect to",
+ )
cmd.parser.add_option(
- '--password', dest='password', type='string',
- help='set the password of the MPD server to connect to')
+ "--password",
+ dest="password",
+ type="string",
+ help="set the password of the MPD server to connect to",
+ )
def func(lib, opts, args):
mpd_config.set_args(opts)
# Overrides for MPD settings.
if opts.host:
- mpd_config['host'] = opts.host.decode('utf-8')
+ mpd_config["host"] = opts.host.decode("utf-8")
if opts.port:
- mpd_config['host'] = int(opts.port)
+ mpd_config["port"] = int(opts.port)
if opts.password:
- mpd_config['password'] = opts.password.decode('utf-8')
+ mpd_config["password"] = opts.password.decode("utf-8")
try:
MPDStats(lib, self._log).run()
diff --git a/beetsplug/mpdupdate.py b/beetsplug/mpdupdate.py
index e5264e1829..cb53afaa57 100644
--- a/beetsplug/mpdupdate.py
+++ b/beetsplug/mpdupdate.py
@@ -21,10 +21,11 @@
password: seekrit
"""
-from beets.plugins import BeetsPlugin
import os
import socket
+
from beets import config
+from beets.plugins import BeetsPlugin
# No need to introduce a dependency on an MPD library for such a
@@ -32,14 +33,15 @@
# easier.
class BufferedSocket:
"""Socket abstraction that allows reading by line."""
- def __init__(self, host, port, sep=b'\n'):
- if host[0] in ['/', '~']:
+
+ def __init__(self, host, port, sep=b"\n"):
+ if host[0] in ["/", "~"]:
self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
self.sock.connect(os.path.expanduser(host))
else:
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.connect((host, port))
- self.buf = b''
+ self.buf = b""
self.sep = sep
def readline(self):
@@ -52,7 +54,7 @@ def readline(self):
res, self.buf = self.buf.split(self.sep, 1)
return res + self.sep
else:
- return b''
+ return b""
def send(self, data):
self.sock.send(data)
@@ -64,63 +66,64 @@ def close(self):
class MPDUpdatePlugin(BeetsPlugin):
def __init__(self):
super().__init__()
- config['mpd'].add({
- 'host': os.environ.get('MPD_HOST', 'localhost'),
- 'port': int(os.environ.get('MPD_PORT', 6600)),
- 'password': '',
- })
- config['mpd']['password'].redact = True
+ config["mpd"].add(
+ {
+ "host": os.environ.get("MPD_HOST", "localhost"),
+ "port": int(os.environ.get("MPD_PORT", 6600)),
+ "password": "",
+ }
+ )
+ config["mpd"]["password"].redact = True
# For backwards compatibility, use any values from the
# plugin-specific "mpdupdate" section.
- for key in config['mpd'].keys():
+ for key in config["mpd"].keys():
if self.config[key].exists():
- config['mpd'][key] = self.config[key].get()
+ config["mpd"][key] = self.config[key].get()
- self.register_listener('database_change', self.db_change)
+ self.register_listener("database_change", self.db_change)
def db_change(self, lib, model):
- self.register_listener('cli_exit', self.update)
+ self.register_listener("cli_exit", self.update)
def update(self, lib):
self.update_mpd(
- config['mpd']['host'].as_str(),
- config['mpd']['port'].get(int),
- config['mpd']['password'].as_str(),
+ config["mpd"]["host"].as_str(),
+ config["mpd"]["port"].get(int),
+ config["mpd"]["password"].as_str(),
)
- def update_mpd(self, host='localhost', port=6600, password=None):
+ def update_mpd(self, host="localhost", port=6600, password=None):
"""Sends the "update" command to the MPD server indicated,
possibly authenticating with a password first.
"""
- self._log.info('Updating MPD database...')
+ self._log.info("Updating MPD database...")
try:
s = BufferedSocket(host, port)
except OSError as e:
- self._log.warning('MPD connection failed: {0}',
- str(e.strerror))
+ self._log.warning("MPD connection failed: {0}", str(e.strerror))
return
resp = s.readline()
- if b'OK MPD' not in resp:
- self._log.warning('MPD connection failed: {0!r}', resp)
+ if b"OK MPD" not in resp:
+ self._log.warning("MPD connection failed: {0!r}", resp)
return
if password:
- s.send(b'password "%s"\n' % password.encode('utf8'))
+ s.send(b'password "%s"\n' % password.encode("utf8"))
resp = s.readline()
- if b'OK' not in resp:
- self._log.warning('Authentication failed: {0!r}', resp)
- s.send(b'close\n')
+ if b"OK" not in resp:
+ self._log.warning("Authentication failed: {0!r}", resp)
+ s.send(b"close\n")
s.close()
return
- s.send(b'update\n')
+ s.send(b"update\n")
resp = s.readline()
- if b'updating_db' not in resp:
- self._log.warning('Update failed: {0!r}', resp)
+ if b"updating_db" not in resp:
+ self._log.warning("Update failed: {0!r}", resp)
- s.send(b'close\n')
+ s.send(b"close\n")
s.close()
- self._log.info('Database updated.')
+ self._log.info("Database updated.")
diff --git a/beetsplug/parentwork.py b/beetsplug/parentwork.py
index 75307b8ff8..4ddef1c14e 100644
--- a/beetsplug/parentwork.py
+++ b/beetsplug/parentwork.py
@@ -17,37 +17,38 @@
"""
+import musicbrainzngs
+
from beets import ui
from beets.plugins import BeetsPlugin
-import musicbrainzngs
-
def direct_parent_id(mb_workid, work_date=None):
"""Given a Musicbrainz work id, find the id one of the works the work is
part of and the first composition date it encounters.
"""
- work_info = musicbrainzngs.get_work_by_id(mb_workid,
- includes=["work-rels",
- "artist-rels"])
- if 'artist-relation-list' in work_info['work'] and work_date is None:
- for artist in work_info['work']['artist-relation-list']:
- if artist['type'] == 'composer':
- if 'end' in artist.keys():
- work_date = artist['end']
-
- if 'work-relation-list' in work_info['work']:
- for direct_parent in work_info['work']['work-relation-list']:
- if direct_parent['type'] == 'parts' \
- and direct_parent.get('direction') == 'backward':
- direct_id = direct_parent['work']['id']
+ work_info = musicbrainzngs.get_work_by_id(
+ mb_workid, includes=["work-rels", "artist-rels"]
+ )
+ if "artist-relation-list" in work_info["work"] and work_date is None:
+ for artist in work_info["work"]["artist-relation-list"]:
+ if artist["type"] == "composer":
+ if "end" in artist.keys():
+ work_date = artist["end"]
+
+ if "work-relation-list" in work_info["work"]:
+ for direct_parent in work_info["work"]["work-relation-list"]:
+ if (
+ direct_parent["type"] == "parts"
+ and direct_parent.get("direction") == "backward"
+ ):
+ direct_id = direct_parent["work"]["id"]
return direct_id, work_date
return None, work_date
def work_parent_id(mb_workid):
- """Find the parent work id and composition date of a work given its id.
- """
+ """Find the parent work id and composition date of a work given its id."""
work_date = None
while True:
new_mb_workid, work_date = direct_parent_id(mb_workid, work_date)
@@ -62,8 +63,9 @@ def find_parentwork_info(mb_workid):
the artist relations, and the composition date for a work's parent work.
"""
parent_id, work_date = work_parent_id(mb_workid)
- work_info = musicbrainzngs.get_work_by_id(parent_id,
- includes=["artist-rels"])
+ work_info = musicbrainzngs.get_work_by_id(
+ parent_id, includes=["artist-rels"]
+ )
return work_info, work_date
@@ -71,19 +73,20 @@ class ParentWorkPlugin(BeetsPlugin):
def __init__(self):
super().__init__()
- self.config.add({
- 'auto': False,
- 'force': False,
- })
+ self.config.add(
+ {
+ "auto": False,
+ "force": False,
+ }
+ )
- if self.config['auto']:
+ if self.config["auto"]:
self.import_stages = [self.imported]
def commands(self):
-
def func(lib, opts, args):
self.config.set_args(opts)
- force_parent = self.config['force'].get(bool)
+ force_parent = self.config["force"].get(bool)
write = ui.should_write()
for item in lib.items(ui.decargs(args)):
@@ -92,22 +95,26 @@ def func(lib, opts, args):
item.store()
if write:
item.try_write()
+
command = ui.Subcommand(
- 'parentwork',
- help='fetch parent works, composers and dates')
+ "parentwork", help="fetch parent works, composers and dates"
+ )
command.parser.add_option(
- '-f', '--force', dest='force',
- action='store_true', default=None,
- help='re-fetch when parent work is already present')
+ "-f",
+ "--force",
+ dest="force",
+ action="store_true",
+ default=None,
+ help="re-fetch when parent work is already present",
+ )
command.func = func
return [command]
def imported(self, session, task):
- """Import hook for fetching parent works automatically.
- """
- force_parent = self.config['force'].get(bool)
+ """Import hook for fetching parent works automatically."""
+ force_parent = self.config["force"].get(bool)
for item in task.imported_items():
self.find_work(item, force_parent)
@@ -124,35 +131,38 @@ def get_info(self, item, work_info):
parentwork_info = {}
composer_exists = False
- if 'artist-relation-list' in work_info['work']:
- for artist in work_info['work']['artist-relation-list']:
- if artist['type'] == 'composer':
+ if "artist-relation-list" in work_info["work"]:
+ for artist in work_info["work"]["artist-relation-list"]:
+ if artist["type"] == "composer":
composer_exists = True
- parent_composer.append(artist['artist']['name'])
- parent_composer_sort.append(artist['artist']['sort-name'])
- if 'end' in artist.keys():
- parentwork_info["parentwork_date"] = artist['end']
-
- parentwork_info['parent_composer'] = ', '.join(parent_composer)
- parentwork_info['parent_composer_sort'] = ', '.join(
- parent_composer_sort)
+ parent_composer.append(artist["artist"]["name"])
+ parent_composer_sort.append(artist["artist"]["sort-name"])
+ if "end" in artist.keys():
+ parentwork_info["parentwork_date"] = artist["end"]
+
+ parentwork_info["parent_composer"] = ", ".join(parent_composer)
+ parentwork_info["parent_composer_sort"] = ", ".join(
+ parent_composer_sort
+ )
if not composer_exists:
self._log.debug(
- 'no composer for {}; add one at '
- 'https://musicbrainz.org/work/{}',
- item, work_info['work']['id'],
+ "no composer for {}; add one at "
+ "https://musicbrainz.org/work/{}",
+ item,
+ work_info["work"]["id"],
)
- parentwork_info['parentwork'] = work_info['work']['title']
- parentwork_info['mb_parentworkid'] = work_info['work']['id']
+ parentwork_info["parentwork"] = work_info["work"]["title"]
+ parentwork_info["mb_parentworkid"] = work_info["work"]["id"]
- if 'disambiguation' in work_info['work']:
- parentwork_info['parentwork_disambig'] = work_info[
- 'work']['disambiguation']
+ if "disambiguation" in work_info["work"]:
+ parentwork_info["parentwork_disambig"] = work_info["work"][
+ "disambiguation"
+ ]
else:
- parentwork_info['parentwork_disambig'] = None
+ parentwork_info["parentwork_disambig"] = None
return parentwork_info
@@ -169,13 +179,17 @@ def find_work(self, item, force):
"""
if not item.mb_workid:
- self._log.info('No work for {}, \
-add one at https://musicbrainz.org/recording/{}', item, item.mb_trackid)
+ self._log.info(
+ "No work for {}, \
+add one at https://musicbrainz.org/recording/{}",
+ item,
+ item.mb_trackid,
+ )
return
- hasparent = hasattr(item, 'parentwork')
+ hasparent = hasattr(item, "parentwork")
work_changed = True
- if hasattr(item, 'parentwork_workid_current'):
+ if hasattr(item, "parentwork_workid_current"):
work_changed = item.parentwork_workid_current != item.mb_workid
if force or not hasparent or work_changed:
try:
@@ -184,14 +198,18 @@ def find_work(self, item, force):
self._log.debug("error fetching work: {}", e)
return
parent_info = self.get_info(item, work_info)
- parent_info['parentwork_workid_current'] = item.mb_workid
- if 'parent_composer' in parent_info:
- self._log.debug("Work fetched: {} - {}",
- parent_info['parentwork'],
- parent_info['parent_composer'])
+ parent_info["parentwork_workid_current"] = item.mb_workid
+ if "parent_composer" in parent_info:
+ self._log.debug(
+ "Work fetched: {} - {}",
+ parent_info["parentwork"],
+ parent_info["parent_composer"],
+ )
else:
- self._log.debug("Work fetched: {} - no parent composer",
- parent_info['parentwork'])
+ self._log.debug(
+ "Work fetched: {} - no parent composer",
+ parent_info["parentwork"],
+ )
elif hasparent:
self._log.debug("{}: Work present, skipping", item)
@@ -203,9 +221,17 @@ def find_work(self, item, force):
item[key] = value
if work_date:
- item['work_date'] = work_date
+ item["work_date"] = work_date
return ui.show_model_changes(
- item, fields=['parentwork', 'parentwork_disambig',
- 'mb_parentworkid', 'parent_composer',
- 'parent_composer_sort', 'work_date',
- 'parentwork_workid_current', 'parentwork_date'])
+ item,
+ fields=[
+ "parentwork",
+ "parentwork_disambig",
+ "mb_parentworkid",
+ "parent_composer",
+ "parent_composer_sort",
+ "work_date",
+ "parentwork_workid_current",
+ "parentwork_date",
+ ],
+ )
diff --git a/beetsplug/permissions.py b/beetsplug/permissions.py
index 6fe3aabe52..191c270191 100644
--- a/beetsplug/permissions.py
+++ b/beetsplug/permissions.py
@@ -7,6 +7,7 @@
"""
import os
import stat
+
from beets import config
from beets.plugins import BeetsPlugin
from beets.util import ancestry, displayable_path, syspath
@@ -35,20 +36,19 @@ def assert_permissions(path, permission, log):
`check_permissions`.
"""
if not check_permissions(path, permission):
- log.warning('could not set permissions on {}', displayable_path(path))
+ log.warning("could not set permissions on {}", displayable_path(path))
log.debug(
- 'set permissions to {}, but permissions are now {}',
+ "set permissions to {}, but permissions are now {}",
permission,
os.stat(syspath(path)).st_mode & 0o777,
)
def dirs_in_library(library, item):
- """Creates a list of ancestor directories in the beets library path.
- """
- return [ancestor
- for ancestor in ancestry(item)
- if ancestor.startswith(library)][1:]
+ """Creates a list of ancestor directories in the beets library path."""
+ return [
+ ancestor for ancestor in ancestry(item) if ancestor.startswith(library)
+ ][1:]
class Permissions(BeetsPlugin):
@@ -56,18 +56,19 @@ def __init__(self):
super().__init__()
# Adding defaults.
- self.config.add({
- 'file': '644',
- 'dir': '755',
- })
+ self.config.add(
+ {
+ "file": "644",
+ "dir": "755",
+ }
+ )
- self.register_listener('item_imported', self.fix)
- self.register_listener('album_imported', self.fix)
- self.register_listener('art_set', self.fix_art)
+ self.register_listener("item_imported", self.fix)
+ self.register_listener("album_imported", self.fix)
+ self.register_listener("art_set", self.fix_art)
def fix(self, lib, item=None, album=None):
- """Fix the permissions for an imported Item or Album.
- """
+ """Fix the permissions for an imported Item or Album."""
files = []
dirs = set()
if item:
@@ -80,8 +81,7 @@ def fix(self, lib, item=None, album=None):
self.set_permissions(files=files, dirs=dirs)
def fix_art(self, album):
- """Fix the permission for Album art file.
- """
+ """Fix the permission for Album art file."""
if album.artpath:
self.set_permissions(files=[album.artpath])
@@ -90,15 +90,15 @@ def set_permissions(self, files=[], dirs=[]):
# string (in YAML quotes) or, for convenience, as an integer so the
# quotes can be omitted. In the latter case, we need to reinterpret the
# integer as octal, not decimal.
- file_perm = config['permissions']['file'].get()
- dir_perm = config['permissions']['dir'].get()
+ file_perm = config["permissions"]["file"].get()
+ dir_perm = config["permissions"]["dir"].get()
file_perm = convert_perm(file_perm)
dir_perm = convert_perm(dir_perm)
for path in files:
# Changing permissions on the destination file.
self._log.debug(
- 'setting file permissions on {}',
+ "setting file permissions on {}",
displayable_path(path),
)
if not check_permissions(path, file_perm):
@@ -111,7 +111,7 @@ def set_permissions(self, files=[], dirs=[]):
for path in dirs:
# Changing permissions on the destination directory.
self._log.debug(
- 'setting directory permissions on {}',
+ "setting directory permissions on {}",
displayable_path(path),
)
if not check_permissions(path, dir_perm):
diff --git a/beetsplug/play.py b/beetsplug/play.py
index f4233490f6..4884bc8beb 100644
--- a/beetsplug/play.py
+++ b/beetsplug/play.py
@@ -15,31 +15,37 @@
"""Send the results of a query to the configured music player as a playlist.
"""
+import shlex
+import subprocess
+from os.path import relpath
+from tempfile import NamedTemporaryFile
+
+from beets import config, ui, util
from beets.plugins import BeetsPlugin
from beets.ui import Subcommand
from beets.ui.commands import PromptChoice
-from beets import config
-from beets import ui
-from beets import util
-from os.path import relpath
-from tempfile import NamedTemporaryFile
-import subprocess
-import shlex
# Indicate where arguments should be inserted into the command string.
# If this is missing, they're placed at the end.
-ARGS_MARKER = '$args'
-
-
-def play(command_str, selection, paths, open_args, log, item_type='track',
- keep_open=False):
+ARGS_MARKER = "$args"
+
+
+def play(
+ command_str,
+ selection,
+ paths,
+ open_args,
+ log,
+ item_type="track",
+ keep_open=False,
+):
"""Play items in paths with command_str and optional arguments. If
keep_open, return to beets, otherwise exit once command runs.
"""
# Print number of tracks or albums to be played, log command to be run.
- item_type += 's' if len(selection) > 1 else ''
- ui.print_('Playing {} {}.'.format(len(selection), item_type))
- log.debug('executing command: {} {!r}', command_str, open_args)
+ item_type += "s" if len(selection) > 1 else ""
+ ui.print_("Playing {} {}.".format(len(selection), item_type))
+ log.debug("executing command: {} {!r}", command_str, open_args)
try:
if keep_open:
@@ -49,42 +55,44 @@ def play(command_str, selection, paths, open_args, log, item_type='track',
else:
util.interactive_open(open_args, command_str)
except OSError as exc:
- raise ui.UserError(
- f"Could not play the query: {exc}")
+ raise ui.UserError(f"Could not play the query: {exc}")
class PlayPlugin(BeetsPlugin):
-
def __init__(self):
super().__init__()
- config['play'].add({
- 'command': None,
- 'use_folders': False,
- 'relative_to': None,
- 'raw': False,
- 'warning_threshold': 100,
- 'bom': False,
- })
+ config["play"].add(
+ {
+ "command": None,
+ "use_folders": False,
+ "relative_to": None,
+ "raw": False,
+ "warning_threshold": 100,
+ "bom": False,
+ }
+ )
- self.register_listener('before_choose_candidate',
- self.before_choose_candidate_listener)
+ self.register_listener(
+ "before_choose_candidate", self.before_choose_candidate_listener
+ )
def commands(self):
play_command = Subcommand(
- 'play',
- help='send music to a player as a playlist'
+ "play", help="send music to a player as a playlist"
)
play_command.parser.add_album_option()
play_command.parser.add_option(
- '-A', '--args',
- action='store',
- help='add additional arguments to the command',
+ "-A",
+ "--args",
+ action="store",
+ help="add additional arguments to the command",
)
play_command.parser.add_option(
- '-y', '--yes',
+ "-y",
+ "--yes",
action="store_true",
- help='skip the warning threshold',
+ help="skip the warning threshold",
)
play_command.func = self._play_command
return [play_command]
@@ -93,8 +101,8 @@ def _play_command(self, lib, opts, args):
"""The CLI command function for `beet play`. Create a list of paths
from query, determine if tracks or albums are to be played.
"""
- use_folders = config['play']['use_folders'].get(bool)
- relative_to = config['play']['relative_to'].get()
+ use_folders = config["play"]["use_folders"].get(bool)
+ relative_to = config["play"]["relative_to"].get()
if relative_to:
relative_to = util.normpath(relative_to)
# Perform search by album and add folders rather than tracks to
@@ -108,22 +116,20 @@ def _play_command(self, lib, opts, args):
if use_folders:
paths.append(album.item_dir())
else:
- paths.extend(item.path
- for item in sort.sort(album.items()))
- item_type = 'album'
+ paths.extend(item.path for item in sort.sort(album.items()))
+ item_type = "album"
# Perform item query and add tracks to playlist.
else:
selection = lib.items(ui.decargs(args))
paths = [item.path for item in selection]
- item_type = 'track'
+ item_type = "track"
if relative_to:
paths = [relpath(path, relative_to) for path in paths]
if not selection:
- ui.print_(ui.colorize('text_warning',
- f'No {item_type} to play.'))
+ ui.print_(ui.colorize("text_warning", f"No {item_type} to play."))
return
open_args = self._playlist_or_paths(paths)
@@ -132,14 +138,13 @@ def _play_command(self, lib, opts, args):
# Check if the selection exceeds configured threshold. If True,
# cancel, otherwise proceed with play command.
if opts.yes or not self._exceeds_threshold(
- selection, command_str, open_args, item_type):
- play(command_str, selection, paths, open_args, self._log,
- item_type)
+ selection, command_str, open_args, item_type
+ ):
+ play(command_str, selection, paths, open_args, self._log, item_type)
def _command_str(self, args=None):
- """Create a command string from the config command and optional args.
- """
- command_str = config['play']['command'].get()
+ """Create a command string from the config command and optional args."""
+ command_str = config["play"]["command"].get()
if not command_str:
return util.open_anything()
# Add optional arguments to the player command.
@@ -153,57 +158,58 @@ def _command_str(self, args=None):
return command_str.replace(" " + ARGS_MARKER, "")
def _playlist_or_paths(self, paths):
- """Return either the raw paths of items or a playlist of the items.
- """
- if config['play']['raw']:
+ """Return either the raw paths of items or a playlist of the items."""
+ if config["play"]["raw"]:
return paths
else:
return [self._create_tmp_playlist(paths)]
- def _exceeds_threshold(self, selection, command_str, open_args,
- item_type='track'):
+ def _exceeds_threshold(
+ self, selection, command_str, open_args, item_type="track"
+ ):
"""Prompt user whether to abort if playlist exceeds threshold. If
True, cancel playback. If False, execute play command.
"""
- warning_threshold = config['play']['warning_threshold'].get(int)
+ warning_threshold = config["play"]["warning_threshold"].get(int)
# Warn user before playing any huge playlists.
if warning_threshold and len(selection) > warning_threshold:
if len(selection) > 1:
- item_type += 's'
-
- ui.print_(ui.colorize(
- 'text_warning',
- 'You are about to queue {} {}.'.format(
- len(selection), item_type)))
-
- if ui.input_options(('Continue', 'Abort')) == 'a':
+ item_type += "s"
+
+ ui.print_(
+ ui.colorize(
+ "text_warning",
+ "You are about to queue {} {}.".format(
+ len(selection), item_type
+ ),
+ )
+ )
+
+ if ui.input_options(("Continue", "Abort")) == "a":
return True
return False
def _create_tmp_playlist(self, paths_list):
- """Create a temporary .m3u file. Return the filename.
- """
- utf8_bom = config['play']['bom'].get(bool)
- m3u = NamedTemporaryFile('wb', suffix='.m3u', delete=False)
+ """Create a temporary .m3u file. Return the filename."""
+ utf8_bom = config["play"]["bom"].get(bool)
+ m3u = NamedTemporaryFile("wb", suffix=".m3u", delete=False)
if utf8_bom:
- m3u.write(b'\xEF\xBB\xBF')
+ m3u.write(b"\xEF\xBB\xBF")
for item in paths_list:
- m3u.write(item + b'\n')
+ m3u.write(item + b"\n")
m3u.close()
return m3u.name
def before_choose_candidate_listener(self, session, task):
- """Append a "Play" choice to the interactive importer prompt.
- """
- return [PromptChoice('y', 'plaY', self.importer_play)]
+ """Append a "Play" choice to the interactive importer prompt."""
+ return [PromptChoice("y", "plaY", self.importer_play)]
def importer_play(self, session, task):
- """Get items from current import task and send to play function.
- """
+ """Get items from current import task and send to play function."""
selection = task.items
paths = [item.path for item in selection]
@@ -211,5 +217,11 @@ def importer_play(self, session, task):
command_str = self._command_str()
if not self._exceeds_threshold(selection, command_str, open_args):
- play(command_str, selection, paths, open_args, self._log,
- keep_open=True)
+ play(
+ command_str,
+ selection,
+ paths,
+ open_args,
+ self._log,
+ keep_open=True,
+ )
diff --git a/beetsplug/playlist.py b/beetsplug/playlist.py
index 10215a8aa6..d40f4125f9 100644
--- a/beetsplug/playlist.py
+++ b/beetsplug/playlist.py
@@ -12,65 +12,70 @@
# included in all copies or substantial portions of the Software.
-import os
import fnmatch
+import os
import tempfile
from typing import Any, Optional, Sequence, Tuple
+
import beets
from beets.util import path_as_posix
class PlaylistQuery(beets.dbcore.NamedQuery):
- """Matches files listed by a playlist file.
- """
+ """Matches files listed by a playlist file."""
+
def __init__(self, pattern):
self.pattern = pattern
- config = beets.config['playlist']
+ config = beets.config["playlist"]
# Get the full path to the playlist
playlist_paths = (
pattern,
- os.path.abspath(os.path.join(
- config['playlist_dir'].as_filename(),
- f'{pattern}.m3u',
- )),
+ os.path.abspath(
+ os.path.join(
+ config["playlist_dir"].as_filename(),
+ f"{pattern}.m3u",
+ )
+ ),
)
self.paths = []
for playlist_path in playlist_paths:
- if not fnmatch.fnmatch(playlist_path, '*.[mM]3[uU]'):
+ if not fnmatch.fnmatch(playlist_path, "*.[mM]3[uU]"):
# This is not am M3U playlist, skip this candidate
continue
try:
- f = open(beets.util.syspath(playlist_path), mode='rb')
+ f = open(beets.util.syspath(playlist_path), mode="rb")
except OSError:
continue
- if config['relative_to'].get() == 'library':
- relative_to = beets.config['directory'].as_filename()
- elif config['relative_to'].get() == 'playlist':
+ if config["relative_to"].get() == "library":
+ relative_to = beets.config["directory"].as_filename()
+ elif config["relative_to"].get() == "playlist":
relative_to = os.path.dirname(playlist_path)
else:
- relative_to = config['relative_to'].as_filename()
+ relative_to = config["relative_to"].as_filename()
relative_to = beets.util.bytestring_path(relative_to)
for line in f:
- if line[0] == '#':
+ if line[0] == "#":
# ignore comments, and extm3u extension
continue
- self.paths.append(beets.util.normpath(
- os.path.join(relative_to, line.rstrip())
- ))
+ self.paths.append(
+ beets.util.normpath(
+ os.path.join(relative_to, line.rstrip())
+ )
+ )
f.close()
break
def clause(self) -> Tuple[Optional[str], Sequence[Any]]:
if not self.paths:
# Playlist is empty
- return '0', ()
- clause = 'path IN ({})'.format(', '.join('?' for path in self.paths))
+ return "0", ()
+ clause = "path IN ({})".format(", ".join("?" for path in self.paths))
return clause, (beets.library.BLOB_TYPE(p) for p in self.paths)
def match(self, item):
@@ -78,33 +83,37 @@ def match(self, item):
class PlaylistPlugin(beets.plugins.BeetsPlugin):
- item_queries = {'playlist': PlaylistQuery}
+ item_queries = {"playlist": PlaylistQuery}
def __init__(self):
super().__init__()
- self.config.add({
- 'auto': False,
- 'playlist_dir': '.',
- 'relative_to': 'library',
- 'forward_slash': False,
- })
-
- self.playlist_dir = self.config['playlist_dir'].as_filename()
+ self.config.add(
+ {
+ "auto": False,
+ "playlist_dir": ".",
+ "relative_to": "library",
+ "forward_slash": False,
+ }
+ )
+
+ self.playlist_dir = self.config["playlist_dir"].as_filename()
self.changes = {}
- if self.config['relative_to'].get() == 'library':
+ if self.config["relative_to"].get() == "library":
self.relative_to = beets.util.bytestring_path(
- beets.config['directory'].as_filename())
- elif self.config['relative_to'].get() != 'playlist':
+ beets.config["directory"].as_filename()
+ )
+ elif self.config["relative_to"].get() != "playlist":
self.relative_to = beets.util.bytestring_path(
- self.config['relative_to'].as_filename())
+ self.config["relative_to"].as_filename()
+ )
else:
self.relative_to = None
- if self.config['auto']:
- self.register_listener('item_moved', self.item_moved)
- self.register_listener('item_removed', self.item_removed)
- self.register_listener('cli_exit', self.cli_exit)
+ if self.config["auto"]:
+ self.register_listener("item_moved", self.item_moved)
+ self.register_listener("item_removed", self.item_removed)
+ self.register_listener("cli_exit", self.cli_exit)
def item_moved(self, item, source, destination):
self.changes[source] = destination
@@ -115,29 +124,36 @@ def item_removed(self, item):
def cli_exit(self, lib):
for playlist in self.find_playlists():
- self._log.info(f'Updating playlist: {playlist}')
+ self._log.info(f"Updating playlist: {playlist}")
base_dir = beets.util.bytestring_path(
- self.relative_to if self.relative_to
+ self.relative_to
+ if self.relative_to
else os.path.dirname(playlist)
)
try:
self.update_playlist(playlist, base_dir)
except beets.util.FilesystemError:
- self._log.error('Failed to update playlist: {}'.format(
- beets.util.displayable_path(playlist)))
+ self._log.error(
+ "Failed to update playlist: {}".format(
+ beets.util.displayable_path(playlist)
+ )
+ )
def find_playlists(self):
"""Find M3U playlists in the playlist directory."""
try:
dir_contents = os.listdir(beets.util.syspath(self.playlist_dir))
except OSError:
- self._log.warning('Unable to open playlist directory {}'.format(
- beets.util.displayable_path(self.playlist_dir)))
+ self._log.warning(
+ "Unable to open playlist directory {}".format(
+ beets.util.displayable_path(self.playlist_dir)
+ )
+ )
return
for filename in dir_contents:
- if fnmatch.fnmatch(filename, '*.[mM]3[uU]'):
+ if fnmatch.fnmatch(filename, "*.[mM]3[uU]"):
yield os.path.join(self.playlist_dir, filename)
def update_playlist(self, filename, base_dir):
@@ -145,11 +161,11 @@ def update_playlist(self, filename, base_dir):
changes = 0
deletions = 0
- with tempfile.NamedTemporaryFile(mode='w+b', delete=False) as tempfp:
+ with tempfile.NamedTemporaryFile(mode="w+b", delete=False) as tempfp:
new_playlist = tempfp.name
- with open(filename, mode='rb') as fp:
+ with open(filename, mode="rb") as fp:
for line in fp:
- original_path = line.rstrip(b'\r\n')
+ original_path = line.rstrip(b"\r\n")
# Ensure that path from playlist is absolute
is_relative = not os.path.isabs(line)
@@ -161,7 +177,7 @@ def update_playlist(self, filename, base_dir):
try:
new_path = self.changes[beets.util.normpath(lookup)]
except KeyError:
- if self.config['forward_slash']:
+ if self.config["forward_slash"]:
line = path_as_posix(line)
tempfp.write(line)
else:
@@ -174,13 +190,15 @@ def update_playlist(self, filename, base_dir):
if is_relative:
new_path = os.path.relpath(new_path, base_dir)
line = line.replace(original_path, new_path)
- if self.config['forward_slash']:
+ if self.config["forward_slash"]:
line = path_as_posix(line)
tempfp.write(line)
if changes or deletions:
self._log.info(
- 'Updated playlist {} ({} changes, {} deletions)'.format(
- filename, changes, deletions))
+ "Updated playlist {} ({} changes, {} deletions)".format(
+ filename, changes, deletions
+ )
+ )
beets.util.copy(new_playlist, filename, replace=True)
beets.util.remove(new_playlist)
diff --git a/beetsplug/plexupdate.py b/beetsplug/plexupdate.py
index 2261a55f4f..003b9f8823 100644
--- a/beetsplug/plexupdate.py
+++ b/beetsplug/plexupdate.py
@@ -8,47 +8,51 @@
token: token
"""
-import requests
+from urllib.parse import urlencode, urljoin
from xml.etree import ElementTree
-from urllib.parse import urljoin, urlencode
+
+import requests
+
from beets import config
from beets.plugins import BeetsPlugin
-def get_music_section(host, port, token, library_name, secure,
- ignore_cert_errors):
- """Getting the section key for the music library in Plex.
- """
- api_endpoint = append_token('library/sections', token)
- url = urljoin('{}://{}:{}'.format(get_protocol(secure), host,
- port), api_endpoint)
+def get_music_section(
+ host, port, token, library_name, secure, ignore_cert_errors
+):
+ """Getting the section key for the music library in Plex."""
+ api_endpoint = append_token("library/sections", token)
+ url = urljoin(
+ "{}://{}:{}".format(get_protocol(secure), host, port), api_endpoint
+ )
# Sends request.
r = requests.get(url, verify=not ignore_cert_errors)
# Parse xml tree and extract music section key.
tree = ElementTree.fromstring(r.content)
- for child in tree.findall('Directory'):
- if child.get('title') == library_name:
- return child.get('key')
+ for child in tree.findall("Directory"):
+ if child.get("title") == library_name:
+ return child.get("key")
-def update_plex(host, port, token, library_name, secure,
- ignore_cert_errors):
- """Ignore certificate errors if configured to.
- """
+def update_plex(host, port, token, library_name, secure, ignore_cert_errors):
+ """Ignore certificate errors if configured to."""
if ignore_cert_errors:
import urllib3
+
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
"""Sends request to the Plex api to start a library refresh.
"""
# Getting section key and build url.
- section_key = get_music_section(host, port, token, library_name,
- secure, ignore_cert_errors)
- api_endpoint = f'library/sections/{section_key}/refresh'
+ section_key = get_music_section(
+ host, port, token, library_name, secure, ignore_cert_errors
+ )
+ api_endpoint = f"library/sections/{section_key}/refresh"
api_endpoint = append_token(api_endpoint, token)
- url = urljoin('{}://{}:{}'.format(get_protocol(secure), host,
- port), api_endpoint)
+ url = urljoin(
+ "{}://{}:{}".format(get_protocol(secure), host, port), api_endpoint
+ )
# Sends request and returns requests object.
r = requests.get(url, verify=not ignore_cert_errors)
@@ -56,18 +60,17 @@ def update_plex(host, port, token, library_name, secure,
def append_token(url, token):
- """Appends the Plex Home token to the api call if required.
- """
+ """Appends the Plex Home token to the api call if required."""
if token:
- url += '?' + urlencode({'X-Plex-Token': token})
+ url += "?" + urlencode({"X-Plex-Token": token})
return url
def get_protocol(secure):
if secure:
- return 'https'
+ return "https"
else:
- return 'http'
+ return "http"
class PlexUpdate(BeetsPlugin):
@@ -75,36 +78,39 @@ def __init__(self):
super().__init__()
# Adding defaults.
- config['plex'].add({
- 'host': 'localhost',
- 'port': 32400,
- 'token': '',
- 'library_name': 'Music',
- 'secure': False,
- 'ignore_cert_errors': False})
-
- config['plex']['token'].redact = True
- self.register_listener('database_change', self.listen_for_db_change)
+ config["plex"].add(
+ {
+ "host": "localhost",
+ "port": 32400,
+ "token": "",
+ "library_name": "Music",
+ "secure": False,
+ "ignore_cert_errors": False,
+ }
+ )
+
+ config["plex"]["token"].redact = True
+ self.register_listener("database_change", self.listen_for_db_change)
def listen_for_db_change(self, lib, model):
"""Listens for beets db change and register the update for the end"""
- self.register_listener('cli_exit', self.update)
+ self.register_listener("cli_exit", self.update)
def update(self, lib):
- """When the client exists try to send refresh request to Plex server.
- """
- self._log.info('Updating Plex library...')
+ """When the client exists try to send refresh request to Plex server."""
+ self._log.info("Updating Plex library...")
# Try to send update request.
try:
update_plex(
- config['plex']['host'].get(),
- config['plex']['port'].get(),
- config['plex']['token'].get(),
- config['plex']['library_name'].get(),
- config['plex']['secure'].get(bool),
- config['plex']['ignore_cert_errors'].get(bool))
- self._log.info('... started.')
+ config["plex"]["host"].get(),
+ config["plex"]["port"].get(),
+ config["plex"]["token"].get(),
+ config["plex"]["library_name"].get(),
+ config["plex"]["secure"].get(bool),
+ config["plex"]["ignore_cert_errors"].get(bool),
+ )
+ self._log.info("... started.")
except requests.exceptions.RequestException:
- self._log.warning('Update failed.')
+ self._log.warning("Update failed.")
diff --git a/beetsplug/random.py b/beetsplug/random.py
index ea9b7b98f8..dc94a0e3a1 100644
--- a/beetsplug/random.py
+++ b/beetsplug/random.py
@@ -16,13 +16,12 @@
"""
from beets.plugins import BeetsPlugin
-from beets.ui import Subcommand, decargs, print_
from beets.random import random_objs
+from beets.ui import Subcommand, decargs, print_
def random_func(lib, opts, args):
- """Select some random items or albums and print the results.
- """
+ """Select some random items or albums and print the results."""
# Fetch all the objects matching the query into a list.
query = decargs(args)
if opts.album:
@@ -31,23 +30,35 @@ def random_func(lib, opts, args):
objs = list(lib.items(query))
# Print a random subset.
- objs = random_objs(objs, opts.album, opts.number, opts.time,
- opts.equal_chance)
+ objs = random_objs(
+ objs, opts.album, opts.number, opts.time, opts.equal_chance
+ )
for obj in objs:
print_(format(obj))
-random_cmd = Subcommand('random',
- help='choose a random track or album')
+random_cmd = Subcommand("random", help="choose a random track or album")
random_cmd.parser.add_option(
- '-n', '--number', action='store', type="int",
- help='number of objects to choose', default=1)
+ "-n",
+ "--number",
+ action="store",
+ type="int",
+ help="number of objects to choose",
+ default=1,
+)
random_cmd.parser.add_option(
- '-e', '--equal-chance', action='store_true',
- help='each artist has the same chance')
+ "-e",
+ "--equal-chance",
+ action="store_true",
+ help="each artist has the same chance",
+)
random_cmd.parser.add_option(
- '-t', '--time', action='store', type="float",
- help='total length in minutes of objects to choose')
+ "-t",
+ "--time",
+ action="store",
+ type="float",
+ help="total length in minutes of objects to choose",
+)
random_cmd.parser.add_all_common_options()
random_cmd.func = random_func
diff --git a/beetsplug/replaygain.py b/beetsplug/replaygain.py
index dc3b8150df..639bb3754c 100644
--- a/beetsplug/replaygain.py
+++ b/beetsplug/replaygain.py
@@ -17,22 +17,27 @@
import enum
import math
import os
+import queue
import signal
import subprocess
import sys
import warnings
from multiprocessing.pool import ThreadPool
-import queue
-from threading import Thread, Event
+from threading import Event, Thread
from beets import ui
from beets.plugins import BeetsPlugin
-from beets.util import (syspath, command_output, displayable_path,
- py3_path, cpu_count)
-
+from beets.util import (
+ command_output,
+ cpu_count,
+ displayable_path,
+ py3_path,
+ syspath,
+)
# Utilities.
+
class ReplayGainError(Exception):
"""Raised when a local (to a track or an album) error occurs in one
of the backends.
@@ -40,8 +45,7 @@ class ReplayGainError(Exception):
class FatalReplayGainError(Exception):
- """Raised when a fatal error occurs in one of the backends.
- """
+ """Raised when a fatal error occurs in one of the backends."""
class FatalGstreamerPluginReplayGainError(FatalReplayGainError):
@@ -56,7 +60,7 @@ def call(args, log, **kwargs):
try:
return command_output(args, **kwargs)
except subprocess.CalledProcessError as e:
- log.debug(e.output.decode('utf8', 'ignore'))
+ log.debug(e.output.decode("utf8", "ignore"))
raise ReplayGainError(
"{} exited with status {}".format(args[0], e.returncode)
)
@@ -68,8 +72,9 @@ def call(args, log, **kwargs):
def after_version(version_a, version_b):
- return tuple(int(s) for s in version_a.split('.')) \
- >= tuple(int(s) for s in version_b.split('.'))
+ return tuple(int(s) for s in version_a.split(".")) >= tuple(
+ int(s) for s in version_b.split(".")
+ )
def db_to_lufs(db):
@@ -102,7 +107,7 @@ class PeakMethod(enum.Enum):
sample = 2
-class RgTask():
+class RgTask:
"""State and methods for a single replaygain calculation (rg version).
Bundles the state (parameters and results) of a single replaygain
@@ -112,8 +117,9 @@ class RgTask():
old rg tags.
"""
- def __init__(self, items, album, target_level, peak_method, backend_name,
- log):
+ def __init__(
+ self, items, album, target_level, peak_method, backend_name, log
+ ):
self.items = items
self.album = album
self.target_level = target_level
@@ -124,13 +130,15 @@ def __init__(self, items, album, target_level, peak_method, backend_name,
self.track_gains = None
def _store_track_gain(self, item, track_gain):
- """Store track gain for a single item in the database.
- """
+ """Store track gain for a single item in the database."""
item.rg_track_gain = track_gain.gain
item.rg_track_peak = track_gain.peak
item.store()
- self._log.debug('applied track gain {0} LU, peak {1} of FS',
- item.rg_track_gain, item.rg_track_peak)
+ self._log.debug(
+ "applied track gain {0} LU, peak {1} of FS",
+ item.rg_track_gain,
+ item.rg_track_peak,
+ )
def _store_album_gain(self, item):
"""Store album gain for a single item in the database.
@@ -140,50 +148,55 @@ def _store_album_gain(self, item):
item.rg_album_gain = self.album_gain.gain
item.rg_album_peak = self.album_gain.peak
item.store()
- self._log.debug('applied album gain {0} LU, peak {1} of FS',
- item.rg_album_gain, item.rg_album_peak)
+ self._log.debug(
+ "applied album gain {0} LU, peak {1} of FS",
+ item.rg_album_gain,
+ item.rg_album_peak,
+ )
def _store_track(self, write):
- """Store track gain for the first track of the task in the database.
- """
+ """Store track gain for the first track of the task in the database."""
item = self.items[0]
if self.track_gains is None or len(self.track_gains) != 1:
# In some cases, backends fail to produce a valid
# `track_gains` without throwing FatalReplayGainError
# => raise non-fatal exception & continue
raise ReplayGainError(
- "ReplayGain backend `{}` failed for track {}"
- .format(self.backend_name, item)
+ "ReplayGain backend `{}` failed for track {}".format(
+ self.backend_name, item
+ )
)
self._store_track_gain(item, self.track_gains[0])
if write:
item.try_write()
- self._log.debug('done analyzing {0}', item)
+ self._log.debug("done analyzing {0}", item)
def _store_album(self, write):
- """Store track/album gains for all tracks of the task in the database.
- """
- if (self.album_gain is None or self.track_gains is None
- or len(self.track_gains) != len(self.items)):
+ """Store track/album gains for all tracks of the task in the database."""
+ if (
+ self.album_gain is None
+ or self.track_gains is None
+ or len(self.track_gains) != len(self.items)
+ ):
# In some cases, backends fail to produce a valid
# `album_gain` without throwing FatalReplayGainError
# => raise non-fatal exception & continue
raise ReplayGainError(
"ReplayGain backend `{}` failed "
- "for some tracks in album {}"
- .format(self.backend_name, self.album)
+ "for some tracks in album {}".format(
+ self.backend_name, self.album
+ )
)
for item, track_gain in zip(self.items, self.track_gains):
self._store_track_gain(item, track_gain)
self._store_album_gain(item)
if write:
item.try_write()
- self._log.debug('done analyzing {0}', item)
+ self._log.debug("done analyzing {0}", item)
def store(self, write):
- """Store computed gains for the items of this task in the database.
- """
+ """Store computed gains for the items of this task in the database."""
if self.album is not None:
self._store_album(write)
else:
@@ -202,14 +215,12 @@ class R128Task(RgTask):
def __init__(self, items, album, target_level, backend_name, log):
# R128_* tags do not store the track/album peak
- super().__init__(items, album, target_level, None, backend_name,
- log)
+ super().__init__(items, album, target_level, None, backend_name, log)
def _store_track_gain(self, item, track_gain):
item.r128_track_gain = track_gain.gain
item.store()
- self._log.debug('applied r128 track gain {0} LU',
- item.r128_track_gain)
+ self._log.debug("applied r128 track gain {0} LU", item.r128_track_gain)
def _store_album_gain(self, item):
"""
@@ -218,13 +229,11 @@ def _store_album_gain(self, item):
"""
item.r128_album_gain = self.album_gain.gain
item.store()
- self._log.debug('applied r128 album gain {0} LU',
- item.r128_album_gain)
+ self._log.debug("applied r128 album gain {0} LU", item.r128_album_gain)
class Backend:
- """An abstract class representing engine for calculating RG values.
- """
+ """An abstract class representing engine for calculating RG values."""
NAME = ""
do_parallel = False
@@ -250,8 +259,7 @@ def compute_album_gain(self, task):
# ffmpeg backend
class FfmpegBackend(Backend):
- """A replaygain backend using ffmpeg's ebur128 filter.
- """
+ """A replaygain backend using ffmpeg's ebur128 filter."""
NAME = "ffmpeg"
do_parallel = True
@@ -294,7 +302,9 @@ def compute_track_gain(self, task):
task.target_level,
task.peak_method,
count_blocks=False,
- )[0] # take only the gain, discarding number of gating blocks
+ )[
+ 0
+ ] # take only the gain, discarding number of gating blocks
for item in task.items
]
@@ -334,7 +344,7 @@ def sum_of_track_powers(track_gain, track_n_blocks):
# This reverses ITU-R BS.1770-4 p. 6 equation (5) to convert
# from loudness to power. The result is the average gating
# block power.
- power = 10**((loudness + 0.691) / 10)
+ power = 10 ** ((loudness + 0.691) / 10)
# Multiply that average power by the number of gating blocks to get
# the sum of all block powers in this track.
@@ -358,7 +368,9 @@ def sum_of_track_powers(track_gain, track_n_blocks):
self._log.debug(
"{}: gain {} LU, peak {}",
- task.album, album_gain, album_peak,
+ task.album,
+ album_gain,
+ album_peak,
)
task.album_gain = Gain(album_gain, album_peak)
@@ -378,14 +390,14 @@ def _construct_cmd(self, item, peak_method):
"a:0",
"-filter",
"ebur128=peak={}".format(
- "none" if peak_method is None else peak_method.name),
+ "none" if peak_method is None else peak_method.name
+ ),
"-f",
"null",
"-",
]
- def _analyse_item(self, item, target_level, peak_method,
- count_blocks=True):
+ def _analyse_item(self, item, target_level, peak_method, count_blocks=True):
"""Analyse item. Return a pair of a Gain object and the number
of gating blocks above the threshold.
@@ -397,9 +409,7 @@ def _analyse_item(self, item, target_level, peak_method,
# call ffmpeg
self._log.debug(f"analyzing {item}")
cmd = self._construct_cmd(item, peak_method)
- self._log.debug(
- 'executing {0}', ' '.join(map(displayable_path, cmd))
- )
+ self._log.debug("executing {0}", " ".join(map(displayable_path, cmd)))
output = call(cmd, self._log).stderr.splitlines()
# parse output
@@ -411,26 +421,35 @@ def _analyse_item(self, item, target_level, peak_method,
output,
# `peak_method` is non-`None` in this arm of the conditional
f" {peak_method.name.capitalize()} peak:".encode(),
- start_line=len(output) - 1, step_size=-1,
+ start_line=len(output) - 1,
+ step_size=-1,
)
peak = self._parse_float(
- output[self._find_line(
- output, b" Peak:",
- line_peak,
- )]
+ output[
+ self._find_line(
+ output,
+ b" Peak:",
+ line_peak,
+ )
+ ]
)
# convert TPFS -> part of FS
- peak = 10**(peak / 20)
+ peak = 10 ** (peak / 20)
line_integrated_loudness = self._find_line(
- output, b" Integrated loudness:",
- start_line=len(output) - 1, step_size=-1,
+ output,
+ b" Integrated loudness:",
+ start_line=len(output) - 1,
+ step_size=-1,
)
gain = self._parse_float(
- output[self._find_line(
- output, b" I:",
- line_integrated_loudness,
- )]
+ output[
+ self._find_line(
+ output,
+ b" I:",
+ line_integrated_loudness,
+ )
+ ]
)
# convert LUFS -> LU from target level
gain = target_level_lufs - gain
@@ -439,10 +458,13 @@ def _analyse_item(self, item, target_level, peak_method,
n_blocks = 0
if count_blocks:
gating_threshold = self._parse_float(
- output[self._find_line(
- output, b" Threshold:",
- start_line=line_integrated_loudness,
- )]
+ output[
+ self._find_line(
+ output,
+ b" Threshold:",
+ start_line=line_integrated_loudness,
+ )
+ ]
)
for line in output:
if not line.startswith(b"[Parsed_ebur128"):
@@ -455,14 +477,12 @@ def _analyse_item(self, item, target_level, peak_method,
if self._parse_float(b"M: " + line[1]) >= gating_threshold:
n_blocks += 1
self._log.debug(
- "{}: {} blocks over {} LUFS"
- .format(item, n_blocks, gating_threshold)
+ "{}: {} blocks over {} LUFS".format(
+ item, n_blocks, gating_threshold
+ )
)
- self._log.debug(
- "{}: gain {} LU, peak {}"
- .format(item, gain, peak)
- )
+ self._log.debug("{}: gain {} LU, peak {}".format(item, gain, peak))
return Gain(gain, peak), n_blocks
@@ -476,9 +496,10 @@ def _find_line(self, output, search, start_line=0, step_size=1):
if output[i].startswith(search):
return i
raise ReplayGainError(
- "ffmpeg output: missing {} after line {}"
- .format(repr(search), start_line)
+ "ffmpeg output: missing {} after line {}".format(
+ repr(search), start_line
)
+ )
def _parse_float(self, line):
"""Extract a float from a key value pair in `line`.
@@ -490,9 +511,8 @@ def _parse_float(self, line):
value = line.split(b":", 1)
if len(value) < 2:
raise ReplayGainError(
- "ffmpeg output: expected key value pair, found {}"
- .format(line)
- )
+ "ffmpeg output: expected key value pair, found {}".format(line)
+ )
value = value[1].lstrip()
# strip unit
value = value.split(b" ", 1)[0]
@@ -501,9 +521,8 @@ def _parse_float(self, line):
return float(value)
except ValueError:
raise ReplayGainError(
- "ffmpeg output: expected float value, found {}"
- .format(value)
- )
+ "ffmpeg output: expected float value, found {}".format(value)
+ )
# mpgain/aacgain CLI tool backend.
@@ -513,10 +532,12 @@ class CommandBackend(Backend):
def __init__(self, config, log):
super().__init__(config, log)
- config.add({
- 'command': "",
- 'noclip': True,
- })
+ config.add(
+ {
+ "command": "",
+ "noclip": True,
+ }
+ )
self.command = config["command"].as_str()
@@ -524,23 +545,22 @@ def __init__(self, config, log):
# Explicit executable path.
if not os.path.isfile(self.command):
raise FatalReplayGainError(
- 'replaygain command does not exist: {}'.format(
- self.command)
+ "replaygain command does not exist: {}".format(self.command)
)
else:
# Check whether the program is in $PATH.
- for cmd in ('mp3gain', 'aacgain'):
+ for cmd in ("mp3gain", "aacgain"):
try:
- call([cmd, '-v'], self._log)
+ call([cmd, "-v"], self._log)
self.command = cmd
except OSError:
pass
if not self.command:
raise FatalReplayGainError(
- 'no replaygain command found: install mp3gain or aacgain'
+ "no replaygain command found: install mp3gain or aacgain"
)
- self.noclip = config['noclip'].get(bool)
+ self.noclip = config["noclip"].get(bool)
def compute_track_gain(self, task):
"""Computes the track gain for the tracks belonging to `task`, and sets
@@ -560,7 +580,7 @@ def compute_album_gain(self, task):
supported_items = list(filter(self.format_supported, task.items))
if len(supported_items) != len(task.items):
- self._log.debug('tracks are of unsupported format')
+ self._log.debug("tracks are of unsupported format")
task.album_gain = None
task.track_gains = None
return task
@@ -571,11 +591,10 @@ def compute_album_gain(self, task):
return task
def format_supported(self, item):
- """Checks whether the given item is supported by the selected tool.
- """
- if 'mp3gain' in self.command and item.format != 'MP3':
+ """Checks whether the given item is supported by the selected tool."""
+ if "mp3gain" in self.command and item.format != "MP3":
return False
- elif 'aacgain' in self.command and item.format not in ('MP3', 'AAC'):
+ elif "aacgain" in self.command and item.format not in ("MP3", "AAC"):
return False
return True
@@ -587,7 +606,7 @@ def compute_gain(self, items, target_level, is_album):
the album gain
"""
if not items:
- self._log.debug('no supported tracks to analyze')
+ self._log.debug("no supported tracks to analyze")
return []
"""Compute ReplayGain values and return a list of results
@@ -599,22 +618,23 @@ def compute_gain(self, items, target_level, is_album):
# tag-writing; this turns the mp3gain/aacgain tool into a gain
# calculator rather than a tag manipulator because we take care
# of changing tags ourselves.
- cmd = [self.command, '-o', '-s', 's']
+ cmd = [self.command, "-o", "-s", "s"]
if self.noclip:
# Adjust to avoid clipping.
- cmd = cmd + ['-k']
+ cmd = cmd + ["-k"]
else:
# Disable clipping warning.
- cmd = cmd + ['-c']
- cmd = cmd + ['-d', str(int(target_level - 89))]
+ cmd = cmd + ["-c"]
+ cmd = cmd + ["-d", str(int(target_level - 89))]
cmd = cmd + [syspath(i.path) for i in items]
- self._log.debug('analyzing {0} files', len(items))
+ self._log.debug("analyzing {0} files", len(items))
self._log.debug("executing {0}", " ".join(map(displayable_path, cmd)))
output = call(cmd, self._log).stdout
- self._log.debug('analysis finished')
- return self.parse_tool_output(output,
- len(items) + (1 if is_album else 0))
+ self._log.debug("analysis finished")
+ return self.parse_tool_output(
+ output, len(items) + (1 if is_album else 0)
+ )
def parse_tool_output(self, text, num_lines):
"""Given the tab-delimited output from an invocation of mp3gain
@@ -622,26 +642,26 @@ def parse_tool_output(self, text, num_lines):
containing information about each analyzed file.
"""
out = []
- for line in text.split(b'\n')[1:num_lines + 1]:
- parts = line.split(b'\t')
- if len(parts) != 6 or parts[0] == b'File':
- self._log.debug('bad tool output: {0}', text)
- raise ReplayGainError('mp3gain failed')
+ for line in text.split(b"\n")[1 : num_lines + 1]:
+ parts = line.split(b"\t")
+ if len(parts) != 6 or parts[0] == b"File":
+ self._log.debug("bad tool output: {0}", text)
+ raise ReplayGainError("mp3gain failed")
d = {
- 'file': parts[0],
- 'mp3gain': int(parts[1]),
- 'gain': float(parts[2]),
- 'peak': float(parts[3]) / (1 << 15),
- 'maxgain': int(parts[4]),
- 'mingain': int(parts[5]),
-
+ "file": parts[0],
+ "mp3gain": int(parts[1]),
+ "gain": float(parts[2]),
+ "peak": float(parts[3]) / (1 << 15),
+ "maxgain": int(parts[4]),
+ "mingain": int(parts[5]),
}
- out.append(Gain(d['gain'], d['peak']))
+ out.append(Gain(d["gain"], d["peak"]))
return out
# GStreamer-based backend.
+
class GStreamerBackend(Backend):
NAME = "gstreamer"
@@ -660,8 +680,13 @@ def __init__(self, config, log):
self._res = self.Gst.ElementFactory.make("audioresample", "res")
self._rg = self.Gst.ElementFactory.make("rganalysis", "rg")
- if self._src is None or self._decbin is None or self._conv is None \
- or self._res is None or self._rg is None:
+ if (
+ self._src is None
+ or self._decbin is None
+ or self._conv is None
+ or self._res is None
+ or self._rg is None
+ ):
raise FatalGstreamerPluginReplayGainError(
"Failed to load required GStreamer plugins"
)
@@ -712,13 +737,12 @@ def _import_gst(self):
)
try:
- gi.require_version('Gst', '1.0')
+ gi.require_version("Gst", "1.0")
except ValueError as e:
- raise FatalReplayGainError(
- f"Failed to load GStreamer 1.0: {e}"
- )
+ raise FatalReplayGainError(f"Failed to load GStreamer 1.0: {e}")
+
+ from gi.repository import GLib, GObject, Gst
- from gi.repository import GObject, Gst, GLib
# Calling GObject.threads_init() is not needed for
# PyGObject 3.10.2+
with warnings.catch_warnings():
@@ -759,8 +783,12 @@ def compute_track_gain(self, task):
ret = []
for item in task.items:
- ret.append(Gain(self._file_tags[item.path]["TRACK_GAIN"],
- self._file_tags[item.path]["TRACK_PEAK"]))
+ ret.append(
+ Gain(
+ self._file_tags[item.path]["TRACK_GAIN"],
+ self._file_tags[item.path]["TRACK_PEAK"],
+ )
+ )
task.track_gains = ret
return task
@@ -826,20 +854,25 @@ def handle_tag(taglist, tag, userdata):
# store the computed tags, we overwrite the RG values of
# received a second time.
if tag == self.Gst.TAG_TRACK_GAIN:
- self._file_tags[self._file]["TRACK_GAIN"] = \
- taglist.get_double(tag)[1]
+ self._file_tags[self._file]["TRACK_GAIN"] = taglist.get_double(
+ tag
+ )[1]
elif tag == self.Gst.TAG_TRACK_PEAK:
- self._file_tags[self._file]["TRACK_PEAK"] = \
- taglist.get_double(tag)[1]
+ self._file_tags[self._file]["TRACK_PEAK"] = taglist.get_double(
+ tag
+ )[1]
elif tag == self.Gst.TAG_ALBUM_GAIN:
- self._file_tags[self._file]["ALBUM_GAIN"] = \
- taglist.get_double(tag)[1]
+ self._file_tags[self._file]["ALBUM_GAIN"] = taglist.get_double(
+ tag
+ )[1]
elif tag == self.Gst.TAG_ALBUM_PEAK:
- self._file_tags[self._file]["ALBUM_PEAK"] = \
- taglist.get_double(tag)[1]
+ self._file_tags[self._file]["ALBUM_PEAK"] = taglist.get_double(
+ tag
+ )[1]
elif tag == self.Gst.TAG_REFERENCE_LEVEL:
- self._file_tags[self._file]["REFERENCE_LEVEL"] = \
- taglist.get_double(tag)[1]
+ self._file_tags[self._file][
+ "REFERENCE_LEVEL"
+ ] = taglist.get_double(tag)[1]
tags.foreach(handle_tag, None)
@@ -854,8 +887,7 @@ def _set_first_file(self):
return True
def _set_file(self):
- """Initialize the filesrc element with the next file to be analyzed.
- """
+ """Initialize the filesrc element with the next file to be analyzed."""
# No more files, we're done
if len(self._files) == 0:
return False
@@ -901,9 +933,9 @@ def _set_next_file(self):
if ret:
# Seek to the beginning in order to clear the EOS state of the
# various elements of the pipeline
- self._pipe.seek_simple(self.Gst.Format.TIME,
- self.Gst.SeekFlags.FLUSH,
- 0)
+ self._pipe.seek_simple(
+ self.Gst.Format.TIME, self.Gst.SeekFlags.FLUSH, 0
+ )
self._pipe.set_state(self.Gst.State.PLAYING)
return ret
@@ -925,6 +957,7 @@ class AudioToolsBackend(Backend):
`_ and its capabilities to read more
file formats and compute ReplayGain values using it replaygain module.
"""
+
NAME = "audiotools"
def __init__(self, config, log):
@@ -959,13 +992,9 @@ def open_audio_file(self, item):
try:
audiofile = self._mod_audiotools.open(py3_path(syspath(item.path)))
except OSError:
- raise ReplayGainError(
- f"File {item.path} was not found"
- )
+ raise ReplayGainError(f"File {item.path} was not found")
except self._mod_audiotools.UnsupportedFile:
- raise ReplayGainError(
- f"Unsupported file type {item.format}"
- )
+ raise ReplayGainError(f"Unsupported file type {item.format}")
return audiofile
@@ -982,8 +1011,7 @@ def init_replaygain(self, audiofile, item):
try:
rg = self._mod_replaygain.ReplayGain(audiofile.sample_rate())
except ValueError:
- raise ReplayGainError(
- f"Unsupported sample rate {item.samplerate}")
+ raise ReplayGainError(f"Unsupported sample rate {item.samplerate}")
return
return rg
@@ -991,8 +1019,9 @@ def compute_track_gain(self, task):
"""Computes the track gain for the tracks belonging to `task`, and sets
the `track_gains` attribute on the task. Returns `task`.
"""
- gains = [self._compute_track_gain(i, task.target_level)
- for i in task.items]
+ gains = [
+ self._compute_track_gain(i, task.target_level) for i in task.items
+ ]
task.track_gains = gains
return task
@@ -1017,8 +1046,8 @@ def _title_gain(self, rg, audiofile, target_level):
except ValueError as exc:
# `audiotools.replaygain` can raise a `ValueError` if the sample
# rate is incorrect.
- self._log.debug('error in rg.title_gain() call: {}', exc)
- raise ReplayGainError('audiotools audio data error')
+ self._log.debug("error in rg.title_gain() call: {}", exc)
+ raise ReplayGainError("audiotools audio data error")
return self._with_target_level(gain, target_level), peak
def _compute_track_gain(self, item, target_level):
@@ -1035,8 +1064,13 @@ def _compute_track_gain(self, item, target_level):
rg, audiofile, target_level
)
- self._log.debug('ReplayGain for track {0} - {1}: {2:.2f}, {3:.2f}',
- item.artist, item.title, rg_track_gain, rg_track_peak)
+ self._log.debug(
+ "ReplayGain for track {0} - {1}: {2:.2f}, {3:.2f}",
+ item.artist,
+ item.title,
+ rg_track_gain,
+ rg_track_peak,
+ )
return Gain(gain=rg_track_gain, peak=rg_track_peak)
def compute_album_gain(self, task):
@@ -1056,19 +1090,26 @@ def compute_album_gain(self, task):
rg_track_gain, rg_track_peak = self._title_gain(
rg, audiofile, task.target_level
)
- track_gains.append(
- Gain(gain=rg_track_gain, peak=rg_track_peak)
+ track_gains.append(Gain(gain=rg_track_gain, peak=rg_track_peak))
+ self._log.debug(
+ "ReplayGain for track {0}: {1:.2f}, {2:.2f}",
+ item,
+ rg_track_gain,
+ rg_track_peak,
)
- self._log.debug('ReplayGain for track {0}: {1:.2f}, {2:.2f}',
- item, rg_track_gain, rg_track_peak)
# After getting the values for all tracks, it's possible to get the
# album values.
rg_album_gain, rg_album_peak = rg.album_gain()
rg_album_gain = self._with_target_level(
- rg_album_gain, task.target_level)
- self._log.debug('ReplayGain for album {0}: {1:.2f}, {2:.2f}',
- task.items[0].album, rg_album_gain, rg_album_peak)
+ rg_album_gain, task.target_level
+ )
+ self._log.debug(
+ "ReplayGain for album {0}: {1:.2f}, {2:.2f}",
+ task.items[0].album,
+ rg_album_gain,
+ rg_album_peak,
+ )
task.album_gain = Gain(gain=rg_album_gain, peak=rg_album_peak)
task.track_gains = track_gains
@@ -1077,7 +1118,7 @@ def compute_album_gain(self, task):
class ExceptionWatcher(Thread):
"""Monitors a queue for exceptions asynchronously.
- Once an exception occurs, raise it and execute a callback.
+ Once an exception occurs, raise it and execute a callback.
"""
def __init__(self, queue, callback):
@@ -1114,39 +1155,39 @@ def join(self, timeout=None):
class ReplayGainPlugin(BeetsPlugin):
- """Provides ReplayGain analysis.
- """
+ """Provides ReplayGain analysis."""
def __init__(self):
super().__init__()
# default backend is 'command' for backward-compatibility.
- self.config.add({
- 'overwrite': False,
- 'auto': True,
- 'backend': 'command',
- 'threads': cpu_count(),
- 'parallel_on_import': False,
- 'per_disc': False,
- 'peak': 'true',
- 'targetlevel': 89,
- 'r128': ['Opus'],
- 'r128_targetlevel': lufs_to_db(-23),
- })
+ self.config.add(
+ {
+ "overwrite": False,
+ "auto": True,
+ "backend": "command",
+ "threads": cpu_count(),
+ "parallel_on_import": False,
+ "per_disc": False,
+ "peak": "true",
+ "targetlevel": 89,
+ "r128": ["Opus"],
+ "r128_targetlevel": lufs_to_db(-23),
+ }
+ )
# FIXME: Consider renaming the configuration option and deprecating the
# old name 'overwrite'.
- self.force_on_import = self.config['overwrite'].get(bool)
+ self.force_on_import = self.config["overwrite"].get(bool)
# Remember which backend is used for CLI feedback
- self.backend_name = self.config['backend'].as_str()
+ self.backend_name = self.config["backend"].as_str()
if self.backend_name not in BACKENDS:
raise ui.UserError(
"Selected ReplayGain backend {} is not supported. "
"Please select one of: {}".format(
- self.backend_name,
- ', '.join(BACKENDS.keys())
+ self.backend_name, ", ".join(BACKENDS.keys())
)
)
@@ -1157,8 +1198,7 @@ def __init__(self):
raise ui.UserError(
"Selected ReplayGain peak method {} is not supported. "
"Please select one of: {}".format(
- peak_method,
- ', '.join(PeakMethod.__members__)
+ peak_method, ", ".join(PeakMethod.__members__)
)
)
# This only applies to plain old rg tags, r128 doesn't store peak
@@ -1166,21 +1206,20 @@ def __init__(self):
self.peak_method = PeakMethod[peak_method]
# On-import analysis.
- if self.config['auto']:
- self.register_listener('import_begin', self.import_begin)
- self.register_listener('import', self.import_end)
+ if self.config["auto"]:
+ self.register_listener("import_begin", self.import_begin)
+ self.register_listener("import", self.import_end)
self.import_stages = [self.imported]
# Formats to use R128.
- self.r128_whitelist = self.config['r128'].as_str_seq()
+ self.r128_whitelist = self.config["r128"].as_str_seq()
try:
self.backend_instance = BACKENDS[self.backend_name](
self.config, self._log
)
except (ReplayGainError, FatalReplayGainError) as e:
- raise ui.UserError(
- f'replaygain initialization failed: {e}')
+ raise ui.UserError(f"replaygain initialization failed: {e}")
# Start threadpool lazily.
self.pool = None
@@ -1197,8 +1236,7 @@ def has_r128_track_data(item):
@staticmethod
def has_rg_track_data(item):
- return (item.rg_track_gain is not None
- and item.rg_track_peak is not None)
+ return item.rg_track_gain is not None and item.rg_track_peak is not None
def track_requires_gain(self, item):
if self.should_use_r128(item):
@@ -1212,13 +1250,14 @@ def track_requires_gain(self, item):
@staticmethod
def has_r128_album_data(item):
- return (item.r128_track_gain is not None
- and item.r128_album_gain is not None)
+ return (
+ item.r128_track_gain is not None
+ and item.r128_album_gain is not None
+ )
@staticmethod
def has_rg_album_data(item):
- return (item.rg_album_gain is not None
- and item.rg_album_peak is not None)
+ return item.rg_album_gain is not None and item.rg_album_peak is not None
def album_requires_gain(self, album):
# Skip calculating gain only when *all* files don't need
@@ -1238,14 +1277,16 @@ def album_requires_gain(self, album):
def create_task(self, items, use_r128, album=None):
if use_r128:
return R128Task(
- items, album,
+ items,
+ album,
self.config["r128_targetlevel"].as_number(),
self.backend_instance.NAME,
self._log,
)
else:
return RgTask(
- items, album,
+ items,
+ album,
self.config["targetlevel"].as_number(),
self.peak_method,
self.backend_instance.NAME,
@@ -1261,7 +1302,7 @@ def handle_album(self, album, write, force=False):
items, nothing is done.
"""
if not force and not self.album_requires_gain(album):
- self._log.info('Skipping album {0}', album)
+ self._log.info("Skipping album {0}", album)
return
items_iter = iter(album.items())
@@ -1269,13 +1310,14 @@ def handle_album(self, album, write, force=False):
if any(use_r128 != self.should_use_r128(i) for i in items_iter):
self._log.error(
"Cannot calculate gain for album {0} (incompatible formats)",
- album)
+ album,
+ )
return
- self._log.info('analyzing {0}', album)
+ self._log.info("analyzing {0}", album)
discs = {}
- if self.config['per_disc'].get(bool):
+ if self.config["per_disc"].get(bool):
for item in album.items():
if discs.get(item.disc) is None:
discs[item.disc] = []
@@ -1288,14 +1330,14 @@ def handle_album(self, album, write, force=False):
try:
self._apply(
self.backend_instance.compute_album_gain,
- args=[task], kwds={},
- callback=lambda task: task.store(write)
+ args=[task],
+ kwds={},
+ callback=lambda task: task.store(write),
)
except ReplayGainError as e:
self._log.info("ReplayGain error: {0}", e)
except FatalReplayGainError as e:
- raise ui.UserError(
- f"Fatal replay gain error: {e}")
+ raise ui.UserError(f"Fatal replay gain error: {e}")
def handle_track(self, item, write, force=False):
"""Compute track replay gain and store it in the item.
@@ -1305,7 +1347,7 @@ def handle_track(self, item, write, force=False):
in the item, nothing is done.
"""
if not force and not self.track_requires_gain(item):
- self._log.info('Skipping track {0}', item)
+ self._log.info("Skipping track {0}", item)
return
use_r128 = self.should_use_r128(item)
@@ -1314,8 +1356,9 @@ def handle_track(self, item, write, force=False):
try:
self._apply(
self.backend_instance.compute_track_gain,
- args=[task], kwds={},
- callback=lambda task: task.store(write)
+ args=[task],
+ kwds={},
+ callback=lambda task: task.store(write),
)
except ReplayGainError as e:
self._log.info("ReplayGain error: {0}", e)
@@ -1323,8 +1366,7 @@ def handle_track(self, item, write, force=False):
raise ui.UserError(f"Fatal replay gain error: {e}")
def open_pool(self, threads):
- """Open a `ThreadPool` instance in `self.pool`
- """
+ """Open a `ThreadPool` instance in `self.pool`"""
if self.pool is None and self.backend_instance.do_parallel:
self.pool = ThreadPool(threads)
self.exc_queue = queue.Queue()
@@ -1332,23 +1374,24 @@ def open_pool(self, threads):
signal.signal(signal.SIGINT, self._interrupt)
self.exc_watcher = ExceptionWatcher(
- self.exc_queue, # threads push exceptions here
- self.terminate_pool # abort once an exception occurs
+ self.exc_queue, # threads push exceptions here
+ self.terminate_pool, # abort once an exception occurs
)
self.exc_watcher.start()
def _apply(self, func, args, kwds, callback):
if self.pool is not None:
+
def handle_exc(exc):
- """Handle exceptions in the async work.
- """
+ """Handle exceptions in the async work."""
if isinstance(exc, ReplayGainError):
self._log.info(exc.args[0]) # Log non-fatal exceptions.
else:
self.exc_queue.put(exc)
- self.pool.apply_async(func, args, kwds, callback,
- error_callback=handle_exc)
+ self.pool.apply_async(
+ func, args, kwds, callback, error_callback=handle_exc
+ )
else:
callback(func(*args, **kwds))
@@ -1367,7 +1410,7 @@ def terminate_pool(self):
def _interrupt(self, signal, frame):
try:
- self._log.info('interrupted')
+ self._log.info("interrupted")
self.terminate_pool()
sys.exit(0)
except SystemExit:
@@ -1375,8 +1418,7 @@ def _interrupt(self, signal, frame):
pass
def close_pool(self):
- """Regularly close the `ThreadPool` instance in `self.pool`.
- """
+ """Regularly close the `ThreadPool` instance in `self.pool`."""
if self.pool is not None:
self.pool.close()
self.pool.join()
@@ -1384,24 +1426,23 @@ def close_pool(self):
self.pool = None
def import_begin(self, session):
- """Handle `import_begin` event -> open pool
- """
- threads = self.config['threads'].get(int)
-
- if self.config['parallel_on_import'] \
- and self.config['auto'] \
- and threads:
+ """Handle `import_begin` event -> open pool"""
+ threads = self.config["threads"].get(int)
+
+ if (
+ self.config["parallel_on_import"]
+ and self.config["auto"]
+ and threads
+ ):
self.open_pool(threads)
def import_end(self, paths):
- """Handle `import` event -> close pool
- """
+ """Handle `import` event -> close pool"""
self.close_pool()
def imported(self, session, task):
- """Add replay gain info to items or albums of ``task``.
- """
- if self.config['auto']:
+ """Add replay gain info to items or albums of ``task``."""
+ if self.config["auto"]:
if task.is_album:
self.handle_album(task.album, False, self.force_on_import)
else:
@@ -1414,22 +1455,24 @@ def command_func(self, lib, opts, args):
# Bypass self.open_pool() if called with `--threads 0`
if opts.threads != 0:
- threads = opts.threads or self.config['threads'].get(int)
+ threads = opts.threads or self.config["threads"].get(int)
self.open_pool(threads)
if opts.album:
albums = lib.albums(ui.decargs(args))
self._log.info(
- "Analyzing {} albums ~ {} backend..."
- .format(len(albums), self.backend_name)
+ "Analyzing {} albums ~ {} backend...".format(
+ len(albums), self.backend_name
+ )
)
for album in albums:
self.handle_album(album, write, force)
else:
items = lib.items(ui.decargs(args))
self._log.info(
- "Analyzing {} tracks ~ {} backend..."
- .format(len(items), self.backend_name)
+ "Analyzing {} tracks ~ {} backend...".format(
+ len(items), self.backend_name
+ )
)
for item in items:
self.handle_track(item, write, force)
@@ -1440,24 +1483,39 @@ def command_func(self, lib, opts, args):
pass
def commands(self):
- """Return the "replaygain" ui subcommand.
- """
- cmd = ui.Subcommand('replaygain', help='analyze for ReplayGain')
+ """Return the "replaygain" ui subcommand."""
+ cmd = ui.Subcommand("replaygain", help="analyze for ReplayGain")
cmd.parser.add_album_option()
cmd.parser.add_option(
- "-t", "--threads", dest="threads", type=int,
- help='change the number of threads, \
- defaults to maximum available processors'
+ "-t",
+ "--threads",
+ dest="threads",
+ type=int,
+ help="change the number of threads, \
+ defaults to maximum available processors",
)
cmd.parser.add_option(
- "-f", "--force", dest="force", action="store_true", default=False,
+ "-f",
+ "--force",
+ dest="force",
+ action="store_true",
+ default=False,
help="analyze all files, including those that "
- "already have ReplayGain metadata")
+ "already have ReplayGain metadata",
+ )
cmd.parser.add_option(
- "-w", "--write", default=None, action="store_true",
- help="write new metadata to files' tags")
+ "-w",
+ "--write",
+ default=None,
+ action="store_true",
+ help="write new metadata to files' tags",
+ )
cmd.parser.add_option(
- "-W", "--nowrite", dest="write", action="store_false",
- help="don't write metadata (opposite of -w)")
+ "-W",
+ "--nowrite",
+ dest="write",
+ action="store_false",
+ help="don't write metadata (opposite of -w)",
+ )
cmd.func = self.command_func
return [cmd]
diff --git a/beetsplug/rewrite.py b/beetsplug/rewrite.py
index e02e4080d2..83829d6579 100644
--- a/beetsplug/rewrite.py
+++ b/beetsplug/rewrite.py
@@ -19,9 +19,8 @@
import re
from collections import defaultdict
+from beets import library, ui
from beets.plugins import BeetsPlugin
-from beets import ui
-from beets import library
def rewriter(field, rules):
@@ -29,6 +28,7 @@ def rewriter(field, rules):
with the given rewriting rules. ``rules`` must be a list of
(pattern, replacement) pairs.
"""
+
def fieldfunc(item):
value = item._values_fixed[field]
for pattern, replacement in rules:
@@ -37,6 +37,7 @@ def fieldfunc(item):
return replacement
# Not activated; return original value.
return value
+
return fieldfunc
@@ -55,15 +56,16 @@ def __init__(self):
except ValueError:
raise ui.UserError("invalid rewrite specification")
if fieldname not in library.Item._fields:
- raise ui.UserError("invalid field name (%s) in rewriter" %
- fieldname)
- self._log.debug('adding template field {0}', key)
+ raise ui.UserError(
+ "invalid field name (%s) in rewriter" % fieldname
+ )
+ self._log.debug("adding template field {0}", key)
pattern = re.compile(pattern.lower())
rules[fieldname].append((pattern, value))
- if fieldname == 'artist':
+ if fieldname == "artist":
# Special case for the artist field: apply the same
# rewrite for "albumartist" as well.
- rules['albumartist'].append((pattern, value))
+ rules["albumartist"].append((pattern, value))
# Replace each template field with the new rewriter function.
for fieldname, fieldrules in rules.items():
diff --git a/beetsplug/scrub.py b/beetsplug/scrub.py
index 484ec073aa..d1e63ee312 100644
--- a/beetsplug/scrub.py
+++ b/beetsplug/scrub.py
@@ -17,74 +17,78 @@
"""
-from beets.plugins import BeetsPlugin
-from beets import ui
-from beets import util
-from beets import config
import mediafile
import mutagen
+from beets import config, ui, util
+from beets.plugins import BeetsPlugin
+
_MUTAGEN_FORMATS = {
- 'asf': 'ASF',
- 'apev2': 'APEv2File',
- 'flac': 'FLAC',
- 'id3': 'ID3FileType',
- 'mp3': 'MP3',
- 'mp4': 'MP4',
- 'oggflac': 'OggFLAC',
- 'oggspeex': 'OggSpeex',
- 'oggtheora': 'OggTheora',
- 'oggvorbis': 'OggVorbis',
- 'oggopus': 'OggOpus',
- 'trueaudio': 'TrueAudio',
- 'wavpack': 'WavPack',
- 'monkeysaudio': 'MonkeysAudio',
- 'optimfrog': 'OptimFROG',
+ "asf": "ASF",
+ "apev2": "APEv2File",
+ "flac": "FLAC",
+ "id3": "ID3FileType",
+ "mp3": "MP3",
+ "mp4": "MP4",
+ "oggflac": "OggFLAC",
+ "oggspeex": "OggSpeex",
+ "oggtheora": "OggTheora",
+ "oggvorbis": "OggVorbis",
+ "oggopus": "OggOpus",
+ "trueaudio": "TrueAudio",
+ "wavpack": "WavPack",
+ "monkeysaudio": "MonkeysAudio",
+ "optimfrog": "OptimFROG",
}
class ScrubPlugin(BeetsPlugin):
"""Removes extraneous metadata from files' tags."""
+
def __init__(self):
super().__init__()
- self.config.add({
- 'auto': True,
- })
+ self.config.add(
+ {
+ "auto": True,
+ }
+ )
- if self.config['auto']:
+ if self.config["auto"]:
self.register_listener("import_task_files", self.import_task_files)
def commands(self):
def scrub_func(lib, opts, args):
# Walk through matching files and remove tags.
for item in lib.items(ui.decargs(args)):
- self._log.info('scrubbing: {0}',
- util.displayable_path(item.path))
+ self._log.info(
+ "scrubbing: {0}", util.displayable_path(item.path)
+ )
self._scrub_item(item, opts.write)
- scrub_cmd = ui.Subcommand('scrub', help='clean audio tags')
+ scrub_cmd = ui.Subcommand("scrub", help="clean audio tags")
scrub_cmd.parser.add_option(
- '-W', '--nowrite', dest='write',
- action='store_false', default=True,
- help='leave tags empty')
+ "-W",
+ "--nowrite",
+ dest="write",
+ action="store_false",
+ default=True,
+ help="leave tags empty",
+ )
scrub_cmd.func = scrub_func
return [scrub_cmd]
@staticmethod
def _mutagen_classes():
- """Get a list of file type classes from the Mutagen module.
- """
+ """Get a list of file type classes from the Mutagen module."""
classes = []
for modname, clsname in _MUTAGEN_FORMATS.items():
- mod = __import__(f'mutagen.{modname}',
- fromlist=[clsname])
+ mod = __import__(f"mutagen.{modname}", fromlist=[clsname])
classes.append(getattr(mod, clsname))
return classes
def _scrub(self, path):
- """Remove all tags from a file.
- """
+ """Remove all tags from a file."""
for cls in self._mutagen_classes():
# Try opening the file with this type, but just skip in the
# event of any error.
@@ -106,8 +110,9 @@ def _scrub(self, path):
del f[tag]
f.save()
except (OSError, mutagen.MutagenError) as exc:
- self._log.error('could not scrub {0}: {1}',
- util.displayable_path(path), exc)
+ self._log.error(
+ "could not scrub {0}: {1}", util.displayable_path(path), exc
+ )
def _scrub_item(self, item, restore):
"""Remove tags from an Item's associated file and, if `restore`
@@ -116,11 +121,11 @@ def _scrub_item(self, item, restore):
# Get album art if we need to restore it.
if restore:
try:
- mf = mediafile.MediaFile(util.syspath(item.path),
- config['id3v23'].get(bool))
+ mf = mediafile.MediaFile(
+ util.syspath(item.path), config["id3v23"].get(bool)
+ )
except mediafile.UnreadableFileError as exc:
- self._log.error('could not open file to scrub: {0}',
- exc)
+ self._log.error("could not open file to scrub: {0}", exc)
return
images = mf.images
@@ -129,21 +134,23 @@ def _scrub_item(self, item, restore):
# Restore tags, if enabled.
if restore:
- self._log.debug('writing new tags after scrub')
+ self._log.debug("writing new tags after scrub")
item.try_write()
if images:
- self._log.debug('restoring art')
+ self._log.debug("restoring art")
try:
- mf = mediafile.MediaFile(util.syspath(item.path),
- config['id3v23'].get(bool))
+ mf = mediafile.MediaFile(
+ util.syspath(item.path), config["id3v23"].get(bool)
+ )
mf.images = images
mf.save()
except mediafile.UnreadableFileError as exc:
- self._log.error('could not write tags: {0}', exc)
+ self._log.error("could not write tags: {0}", exc)
def import_task_files(self, session, task):
"""Automatically scrub imported files."""
for item in task.imported_items():
- self._log.debug('auto-scrubbing {0}',
- util.displayable_path(item.path))
+ self._log.debug(
+ "auto-scrubbing {0}", util.displayable_path(item.path)
+ )
self._scrub_item(item, ui.should_write())
diff --git a/beetsplug/smartplaylist.py b/beetsplug/smartplaylist.py
index 7abc7e3195..6e20cc21b0 100644
--- a/beetsplug/smartplaylist.py
+++ b/beetsplug/smartplaylist.py
@@ -16,49 +16,60 @@
"""
-from beets.plugins import BeetsPlugin
-from beets.plugins import send as send_event
+import os
+from urllib.request import pathname2url
+
from beets import ui
-from beets.util import (mkdirall, normpath, sanitize_path, syspath,
- bytestring_path, path_as_posix, displayable_path)
-from beets.library import Item, Album, parse_query_string
from beets.dbcore import OrQuery
from beets.dbcore.query import MultipleSort, ParsingError
-import os
-from urllib.request import pathname2url
+from beets.library import Album, Item, parse_query_string
+from beets.plugins import BeetsPlugin
+from beets.plugins import send as send_event
+from beets.util import (
+ bytestring_path,
+ displayable_path,
+ mkdirall,
+ normpath,
+ path_as_posix,
+ sanitize_path,
+ syspath,
+)
class SmartPlaylistPlugin(BeetsPlugin):
-
def __init__(self):
super().__init__()
- self.config.add({
- 'relative_to': None,
- 'playlist_dir': '.',
- 'auto': True,
- 'playlists': [],
- 'forward_slash': False,
- 'prefix': '',
- 'urlencode': False,
- 'pretend_paths': False,
- })
-
- self.config['prefix'].redact = True # May contain username/password.
+ self.config.add(
+ {
+ "relative_to": None,
+ "playlist_dir": ".",
+ "auto": True,
+ "playlists": [],
+ "forward_slash": False,
+ "prefix": "",
+ "urlencode": False,
+ "pretend_paths": False,
+ }
+ )
+
+ self.config["prefix"].redact = True # May contain username/password.
self._matched_playlists = None
self._unmatched_playlists = None
- if self.config['auto']:
- self.register_listener('database_change', self.db_change)
+ if self.config["auto"]:
+ self.register_listener("database_change", self.db_change)
def commands(self):
spl_update = ui.Subcommand(
- 'splupdate',
- help='update the smart playlists. Playlist names may be '
- 'passed as arguments.'
+ "splupdate",
+ help="update the smart playlists. Playlist names may be "
+ "passed as arguments.",
)
spl_update.parser.add_option(
- '-p', '--pretend', action='store_true',
- help="display query results but don't write playlist files."
+ "-p",
+ "--pretend",
+ action="store_true",
+ help="display query results but don't write playlist files.",
)
spl_update.func = self.update_cmd
return [spl_update]
@@ -71,13 +82,16 @@ def update_cmd(self, lib, opts, args):
if not a.endswith(".m3u"):
args.add(f"{a}.m3u")
- playlists = {(name, q, a_q)
- for name, q, a_q in self._unmatched_playlists
- if name in args}
+ playlists = {
+ (name, q, a_q)
+ for name, q, a_q in self._unmatched_playlists
+ if name in args
+ }
if not playlists:
raise ui.UserError(
- 'No playlist matching any of {} found'.format(
- [name for name, _, _ in self._unmatched_playlists])
+ "No playlist matching any of {} found".format(
+ [name for name, _, _ in self._unmatched_playlists]
+ )
)
self._matched_playlists = playlists
@@ -105,15 +119,14 @@ def build_queries(self):
self._unmatched_playlists = set()
self._matched_playlists = set()
- for playlist in self.config['playlists'].get(list):
- if 'name' not in playlist:
+ for playlist in self.config["playlists"].get(list):
+ if "name" not in playlist:
self._log.warning("playlist configuration is missing name")
continue
- playlist_data = (playlist['name'],)
+ playlist_data = (playlist["name"],)
try:
- for key, model_cls in (('query', Item),
- ('album_query', Album)):
+ for key, model_cls in (("query", Item), ("album_query", Album)):
qs = playlist.get(key)
if qs is None:
query_and_sort = None, None
@@ -123,8 +136,9 @@ def build_queries(self):
query_and_sort = parse_query_string(qs[0], model_cls)
else:
# multiple queries and sorts
- queries, sorts = zip(*(parse_query_string(q, model_cls)
- for q in qs))
+ queries, sorts = zip(
+ *(parse_query_string(q, model_cls) for q in qs)
+ )
query = OrQuery(queries)
final_sorts = []
for s in sorts:
@@ -136,7 +150,7 @@ def build_queries(self):
if not final_sorts:
sort = None
elif len(final_sorts) == 1:
- sort, = final_sorts
+ (sort,) = final_sorts
else:
sort = MultipleSort(final_sorts)
query_and_sort = query, sort
@@ -144,8 +158,9 @@ def build_queries(self):
playlist_data += (query_and_sort,)
except ParsingError as exc:
- self._log.warning("invalid query in playlist {}: {}",
- playlist['name'], exc)
+ self._log.warning(
+ "invalid query in playlist {}: {}", playlist["name"], exc
+ )
continue
self._unmatched_playlists.add(playlist_data)
@@ -164,24 +179,26 @@ def db_change(self, lib, model):
for playlist in self._unmatched_playlists:
n, (q, _), (a_q, _) = playlist
if self.matches(model, q, a_q):
- self._log.debug(
- "{0} will be updated because of {1}", n, model)
+ self._log.debug("{0} will be updated because of {1}", n, model)
self._matched_playlists.add(playlist)
- self.register_listener('cli_exit', self.update_playlists)
+ self.register_listener("cli_exit", self.update_playlists)
self._unmatched_playlists -= self._matched_playlists
def update_playlists(self, lib, pretend=False):
if pretend:
- self._log.info("Showing query results for {0} smart playlists...",
- len(self._matched_playlists))
+ self._log.info(
+ "Showing query results for {0} smart playlists...",
+ len(self._matched_playlists),
+ )
else:
- self._log.info("Updating {0} smart playlists...",
- len(self._matched_playlists))
+ self._log.info(
+ "Updating {0} smart playlists...", len(self._matched_playlists)
+ )
- playlist_dir = self.config['playlist_dir'].as_filename()
+ playlist_dir = self.config["playlist_dir"].as_filename()
playlist_dir = bytestring_path(playlist_dir)
- relative_to = self.config['relative_to'].get()
+ relative_to = self.config["relative_to"].get()
if relative_to:
relative_to = normpath(relative_to)
@@ -191,7 +208,7 @@ def update_playlists(self, lib, pretend=False):
for playlist in self._matched_playlists:
name, (query, q_sort), (album_query, a_q_sort) = playlist
if pretend:
- self._log.info('Results for playlist {}:', name)
+ self._log.info("Results for playlist {}:", name)
else:
self._log.info("Creating playlist {0}", name)
items = []
@@ -214,31 +231,35 @@ def update_playlists(self, lib, pretend=False):
item_path = os.path.relpath(item.path, relative_to)
if item_path not in m3us[m3u_name]:
m3us[m3u_name].append(item_path)
- if pretend and self.config['pretend_paths']:
+ if pretend and self.config["pretend_paths"]:
print(displayable_path(item_path))
elif pretend:
print(item)
if not pretend:
- prefix = bytestring_path(self.config['prefix'].as_str())
+ prefix = bytestring_path(self.config["prefix"].as_str())
# Write all of the accumulated track lists to files.
for m3u in m3us:
- m3u_path = normpath(os.path.join(playlist_dir,
- bytestring_path(m3u)))
+ m3u_path = normpath(
+ os.path.join(playlist_dir, bytestring_path(m3u))
+ )
mkdirall(m3u_path)
- with open(syspath(m3u_path), 'wb') as f:
+ with open(syspath(m3u_path), "wb") as f:
for path in m3us[m3u]:
- if self.config['forward_slash'].get():
+ if self.config["forward_slash"].get():
path = path_as_posix(path)
- if self.config['urlencode']:
+ if self.config["urlencode"]:
path = bytestring_path(pathname2url(path))
- f.write(prefix + path + b'\n')
+ f.write(prefix + path + b"\n")
# Send an event when playlists were updated.
send_event("smartplaylist_update")
if pretend:
- self._log.info("Displayed results for {0} playlists",
- len(self._matched_playlists))
+ self._log.info(
+ "Displayed results for {0} playlists",
+ len(self._matched_playlists),
+ )
else:
- self._log.info("{0} playlists updated",
- len(self._matched_playlists))
+ self._log.info(
+ "{0} playlists updated", len(self._matched_playlists)
+ )
diff --git a/beetsplug/sonosupdate.py b/beetsplug/sonosupdate.py
index 5d01fd93c8..af3410ffa8 100644
--- a/beetsplug/sonosupdate.py
+++ b/beetsplug/sonosupdate.py
@@ -16,31 +16,32 @@
This is based on the Kodi Update plugin.
"""
-from beets.plugins import BeetsPlugin
import soco
+from beets.plugins import BeetsPlugin
+
class SonosUpdate(BeetsPlugin):
def __init__(self):
super().__init__()
- self.register_listener('database_change', self.listen_for_db_change)
+ self.register_listener("database_change", self.listen_for_db_change)
def listen_for_db_change(self, lib, model):
"""Listens for beets db change and register the update"""
- self.register_listener('cli_exit', self.update)
+ self.register_listener("cli_exit", self.update)
def update(self, lib):
"""When the client exists try to send refresh request to a Sonos
controller.
"""
- self._log.info('Requesting a Sonos library update...')
+ self._log.info("Requesting a Sonos library update...")
device = soco.discovery.any_soco()
if device:
device.music_library.start_library_update()
else:
- self._log.warning('Could not find a Sonos device.')
+ self._log.warning("Could not find a Sonos device.")
return
- self._log.info('Sonos update triggered')
+ self._log.info("Sonos update triggered")
diff --git a/beetsplug/spotify.py b/beetsplug/spotify.py
index a95e09cc81..987b18aafa 100644
--- a/beetsplug/spotify.py
+++ b/beetsplug/spotify.py
@@ -27,6 +27,7 @@
import confuse
import requests
import unidecode
+
from beets import ui
from beets.autotag.hooks import AlbumInfo, TrackInfo
from beets.dbcore import types
@@ -42,71 +43,71 @@ class SpotifyAPIError(Exception):
class SpotifyPlugin(MetadataSourcePlugin, BeetsPlugin):
- data_source = 'Spotify'
+ data_source = "Spotify"
item_types = {
- 'spotify_track_popularity': types.INTEGER,
- 'spotify_acousticness': types.FLOAT,
- 'spotify_danceability': types.FLOAT,
- 'spotify_energy': types.FLOAT,
- 'spotify_instrumentalness': types.FLOAT,
- 'spotify_key': types.FLOAT,
- 'spotify_liveness': types.FLOAT,
- 'spotify_loudness': types.FLOAT,
- 'spotify_mode': types.INTEGER,
- 'spotify_speechiness': types.FLOAT,
- 'spotify_tempo': types.FLOAT,
- 'spotify_time_signature': types.INTEGER,
- 'spotify_valence': types.FLOAT,
- 'spotify_updated': DateType(),
+ "spotify_track_popularity": types.INTEGER,
+ "spotify_acousticness": types.FLOAT,
+ "spotify_danceability": types.FLOAT,
+ "spotify_energy": types.FLOAT,
+ "spotify_instrumentalness": types.FLOAT,
+ "spotify_key": types.FLOAT,
+ "spotify_liveness": types.FLOAT,
+ "spotify_loudness": types.FLOAT,
+ "spotify_mode": types.INTEGER,
+ "spotify_speechiness": types.FLOAT,
+ "spotify_tempo": types.FLOAT,
+ "spotify_time_signature": types.INTEGER,
+ "spotify_valence": types.FLOAT,
+ "spotify_updated": DateType(),
}
# Base URLs for the Spotify API
# Documentation: https://developer.spotify.com/web-api
- oauth_token_url = 'https://accounts.spotify.com/api/token'
- open_track_url = 'https://open.spotify.com/track/'
- search_url = 'https://api.spotify.com/v1/search'
- album_url = 'https://api.spotify.com/v1/albums/'
- track_url = 'https://api.spotify.com/v1/tracks/'
- audio_features_url = 'https://api.spotify.com/v1/audio-features/'
+ oauth_token_url = "https://accounts.spotify.com/api/token"
+ open_track_url = "https://open.spotify.com/track/"
+ search_url = "https://api.spotify.com/v1/search"
+ album_url = "https://api.spotify.com/v1/albums/"
+ track_url = "https://api.spotify.com/v1/tracks/"
+ audio_features_url = "https://api.spotify.com/v1/audio-features/"
id_regex = spotify_id_regex
spotify_audio_features = {
- 'acousticness': 'spotify_acousticness',
- 'danceability': 'spotify_danceability',
- 'energy': 'spotify_energy',
- 'instrumentalness': 'spotify_instrumentalness',
- 'key': 'spotify_key',
- 'liveness': 'spotify_liveness',
- 'loudness': 'spotify_loudness',
- 'mode': 'spotify_mode',
- 'speechiness': 'spotify_speechiness',
- 'tempo': 'spotify_tempo',
- 'time_signature': 'spotify_time_signature',
- 'valence': 'spotify_valence',
+ "acousticness": "spotify_acousticness",
+ "danceability": "spotify_danceability",
+ "energy": "spotify_energy",
+ "instrumentalness": "spotify_instrumentalness",
+ "key": "spotify_key",
+ "liveness": "spotify_liveness",
+ "loudness": "spotify_loudness",
+ "mode": "spotify_mode",
+ "speechiness": "spotify_speechiness",
+ "tempo": "spotify_tempo",
+ "time_signature": "spotify_time_signature",
+ "valence": "spotify_valence",
}
def __init__(self):
super().__init__()
self.config.add(
{
- 'mode': 'list',
- 'tiebreak': 'popularity',
- 'show_failures': False,
- 'artist_field': 'albumartist',
- 'album_field': 'album',
- 'track_field': 'title',
- 'region_filter': None,
- 'regex': [],
- 'client_id': '4e414367a1d14c75a5c5129a627fcab8',
- 'client_secret': 'f82bdc09b2254f1a8286815d02fd46dc',
- 'tokenfile': 'spotify_token.json',
+ "mode": "list",
+ "tiebreak": "popularity",
+ "show_failures": False,
+ "artist_field": "albumartist",
+ "album_field": "album",
+ "track_field": "title",
+ "region_filter": None,
+ "regex": [],
+ "client_id": "4e414367a1d14c75a5c5129a627fcab8",
+ "client_secret": "f82bdc09b2254f1a8286815d02fd46dc",
+ "tokenfile": "spotify_token.json",
}
)
- self.config['client_secret'].redact = True
+ self.config["client_secret"].redact = True
- self.tokenfile = self.config['tokenfile'].get(
+ self.tokenfile = self.config["tokenfile"].get(
confuse.Filename(in_app_dir=True)
) # Path to the JSON file for storing the OAuth access token.
self.setup()
@@ -119,46 +120,45 @@ def setup(self):
except OSError:
self._authenticate()
else:
- self.access_token = token_data['access_token']
+ self.access_token = token_data["access_token"]
def _authenticate(self):
"""Request an access token via the Client Credentials Flow:
https://developer.spotify.com/documentation/general/guides/authorization-guide/#client-credentials-flow
"""
headers = {
- 'Authorization': 'Basic {}'.format(
+ "Authorization": "Basic {}".format(
base64.b64encode(
- ':'.join(
+ ":".join(
self.config[k].as_str()
- for k in ('client_id', 'client_secret')
+ for k in ("client_id", "client_secret")
).encode()
).decode()
)
}
response = requests.post(
self.oauth_token_url,
- data={'grant_type': 'client_credentials'},
+ data={"grant_type": "client_credentials"},
headers=headers,
)
try:
response.raise_for_status()
except requests.exceptions.HTTPError as e:
raise ui.UserError(
- 'Spotify authorization failed: {}\n{}'.format(
- e, response.text
- )
+ "Spotify authorization failed: {}\n{}".format(e, response.text)
)
- self.access_token = response.json()['access_token']
+ self.access_token = response.json()["access_token"]
# Save the token for later use.
self._log.debug(
- '{} access token: {}', self.data_source, self.access_token
+ "{} access token: {}", self.data_source, self.access_token
)
- with open(self.tokenfile, 'w') as f:
- json.dump({'access_token': self.access_token}, f)
+ with open(self.tokenfile, "w") as f:
+ json.dump({"access_token": self.access_token}, f)
- def _handle_response(self, request_type, url, params=None, retry_count=0,
- max_retries=3):
+ def _handle_response(
+ self, request_type, url, params=None, retry_count=0, max_retries=3
+ ):
"""Send a request, reauthenticating if necessary.
:param request_type: Type of :class:`Request` constructor,
@@ -175,48 +175,56 @@ def _handle_response(self, request_type, url, params=None, retry_count=0,
try:
response = request_type(
url,
- headers={'Authorization': f'Bearer {self.access_token}'},
+ headers={"Authorization": f"Bearer {self.access_token}"},
params=params,
timeout=10,
)
response.raise_for_status()
return response.json()
except requests.exceptions.ReadTimeout:
- self._log.error('ReadTimeout.')
- raise SpotifyAPIError('Request timed out.')
+ self._log.error("ReadTimeout.")
+ raise SpotifyAPIError("Request timed out.")
except requests.exceptions.RequestException as e:
if e.response.status_code == 401:
self._log.debug(
- f'{self.data_source} access token has expired. '
- f'Reauthenticating.'
+ f"{self.data_source} access token has expired. "
+ f"Reauthenticating."
)
self._authenticate()
return self._handle_response(request_type, url, params=params)
elif e.response.status_code == 404:
- raise SpotifyAPIError(f'API Error: {e.response.status_code}\n'
- f'URL: {url}\nparams: {params}')
+ raise SpotifyAPIError(
+ f"API Error: {e.response.status_code}\n"
+ f"URL: {url}\nparams: {params}"
+ )
elif e.response.status_code == 429:
if retry_count >= max_retries:
- raise SpotifyAPIError('Maximum retries reached.')
- seconds = response.headers.get('Retry-After',
- DEFAULT_WAITING_TIME)
- self._log.debug(f'Too many API requests. Retrying after '
- f'{seconds} seconds.')
+ raise SpotifyAPIError("Maximum retries reached.")
+ seconds = response.headers.get(
+ "Retry-After", DEFAULT_WAITING_TIME
+ )
+ self._log.debug(
+ f"Too many API requests. Retrying after "
+ f"{seconds} seconds."
+ )
time.sleep(int(seconds) + 1)
- return self._handle_response(request_type, url,
- params=params,
- retry_count=retry_count + 1)
+ return self._handle_response(
+ request_type,
+ url,
+ params=params,
+ retry_count=retry_count + 1,
+ )
elif e.response.status_code == 503:
- self._log.error('Service Unavailable.')
- raise SpotifyAPIError('Service Unavailable.')
+ self._log.error("Service Unavailable.")
+ raise SpotifyAPIError("Service Unavailable.")
elif e.response is not None:
raise SpotifyAPIError(
- f'{self.data_source} API error:\n{e.response.text}\n'
- f'URL:\n{url}\nparams:\n{params}'
+ f"{self.data_source} API error:\n{e.response.text}\n"
+ f"URL:\n{url}\nparams:\n{params}"
)
else:
- self._log.error(f'Request failed. Error: {e}')
- raise SpotifyAPIError('Request failed.')
+ self._log.error(f"Request failed. Error: {e}")
+ raise SpotifyAPIError("Request failed.")
def album_for_id(self, album_id):
"""Fetch an album by its Spotify ID or URL and return an
@@ -227,29 +235,29 @@ def album_for_id(self, album_id):
:return: AlbumInfo object for album
:rtype: beets.autotag.hooks.AlbumInfo or None
"""
- spotify_id = self._get_id('album', album_id, self.id_regex)
+ spotify_id = self._get_id("album", album_id, self.id_regex)
if spotify_id is None:
return None
album_data = self._handle_response(
requests.get, self.album_url + spotify_id
)
- if album_data['name'] == "":
+ if album_data["name"] == "":
self._log.debug("Album removed from Spotify: {}", album_id)
return None
- artist, artist_id = self.get_artist(album_data['artists'])
+ artist, artist_id = self.get_artist(album_data["artists"])
date_parts = [
- int(part) for part in album_data['release_date'].split('-')
+ int(part) for part in album_data["release_date"].split("-")
]
- release_date_precision = album_data['release_date_precision']
- if release_date_precision == 'day':
+ release_date_precision = album_data["release_date_precision"]
+ if release_date_precision == "day":
year, month, day = date_parts
- elif release_date_precision == 'month':
+ elif release_date_precision == "month":
year, month = date_parts
day = None
- elif release_date_precision == 'year':
+ elif release_date_precision == "year":
year = date_parts[0]
month = None
day = None
@@ -261,12 +269,13 @@ def album_for_id(self, album_id):
)
)
- tracks_data = album_data['tracks']
- tracks_items = tracks_data['items']
- while tracks_data['next']:
- tracks_data = self._handle_response(requests.get,
- tracks_data['next'])
- tracks_items.extend(tracks_data['items'])
+ tracks_data = album_data["tracks"]
+ tracks_items = tracks_data["items"]
+ while tracks_data["next"]:
+ tracks_data = self._handle_response(
+ requests.get, tracks_data["next"]
+ )
+ tracks_items.extend(tracks_data["items"])
tracks = []
medium_totals = collections.defaultdict(int)
@@ -279,23 +288,23 @@ def album_for_id(self, album_id):
track.medium_total = medium_totals[track.medium]
return AlbumInfo(
- album=album_data['name'],
+ album=album_data["name"],
album_id=spotify_id,
spotify_album_id=spotify_id,
artist=artist,
artist_id=artist_id,
spotify_artist_id=artist_id,
tracks=tracks,
- albumtype=album_data['album_type'],
- va=len(album_data['artists']) == 1
- and artist.lower() == 'various artists',
+ albumtype=album_data["album_type"],
+ va=len(album_data["artists"]) == 1
+ and artist.lower() == "various artists",
year=year,
month=month,
day=day,
- label=album_data['label'],
+ label=album_data["label"],
mediums=max(medium_totals.keys()),
data_source=self.data_source,
- data_url=album_data['external_urls']['spotify'],
+ data_url=album_data["external_urls"]["spotify"],
)
def _get_track(self, track_data):
@@ -307,27 +316,27 @@ def _get_track(self, track_data):
:return: TrackInfo object for track
:rtype: beets.autotag.hooks.TrackInfo
"""
- artist, artist_id = self.get_artist(track_data['artists'])
+ artist, artist_id = self.get_artist(track_data["artists"])
# Get album information for spotify tracks
try:
- album = track_data['album']['name']
+ album = track_data["album"]["name"]
except (KeyError, TypeError):
album = None
return TrackInfo(
- title=track_data['name'],
- track_id=track_data['id'],
- spotify_track_id=track_data['id'],
+ title=track_data["name"],
+ track_id=track_data["id"],
+ spotify_track_id=track_data["id"],
artist=artist,
album=album,
artist_id=artist_id,
spotify_artist_id=artist_id,
- length=track_data['duration_ms'] / 1000,
- index=track_data['track_number'],
- medium=track_data['disc_number'],
- medium_index=track_data['track_number'],
+ length=track_data["duration_ms"] / 1000,
+ index=track_data["track_number"],
+ medium=track_data["disc_number"],
+ medium_index=track_data["track_number"],
data_source=self.data_source,
- data_url=track_data['external_urls']['spotify'],
+ data_url=track_data["external_urls"]["spotify"],
)
def track_for_id(self, track_id=None, track_data=None):
@@ -344,7 +353,7 @@ def track_for_id(self, track_id=None, track_data=None):
:rtype: beets.autotag.hooks.TrackInfo or None
"""
if track_data is None:
- spotify_id = self._get_id('track', track_id, self.id_regex)
+ spotify_id = self._get_id("track", track_id, self.id_regex)
if spotify_id is None:
return None
track_data = self._handle_response(
@@ -356,19 +365,19 @@ def track_for_id(self, track_id=None, track_data=None):
# release) and `track.medium_total` (total number of tracks on
# the track's disc).
album_data = self._handle_response(
- requests.get, self.album_url + track_data['album']['id']
+ requests.get, self.album_url + track_data["album"]["id"]
)
medium_total = 0
- for i, track_data in enumerate(album_data['tracks']['items'], start=1):
- if track_data['disc_number'] == track.medium:
+ for i, track_data in enumerate(album_data["tracks"]["items"], start=1):
+ if track_data["disc_number"] == track.medium:
medium_total += 1
- if track_data['id'] == track.track_id:
+ if track_data["id"] == track.track_id:
track.index = i
track.medium_total = medium_total
return track
@staticmethod
- def _construct_search_query(filters=None, keywords=''):
+ def _construct_search_query(filters=None, keywords=""):
"""Construct a query string with the specified filters and keywords to
be provided to the Spotify Search API
(https://developer.spotify.com/documentation/web-api/reference/search/search/#writing-a-query---guidelines).
@@ -382,14 +391,14 @@ def _construct_search_query(filters=None, keywords=''):
"""
query_components = [
keywords,
- ' '.join(':'.join((k, v)) for k, v in filters.items()),
+ " ".join(":".join((k, v)) for k, v in filters.items()),
]
- query = ' '.join([q for q in query_components if q])
+ query = " ".join([q for q in query_components if q])
if not isinstance(query, str):
- query = query.decode('utf8')
+ query = query.decode("utf8")
return unidecode.unidecode(query)
- def _search_api(self, query_type, filters=None, keywords=''):
+ def _search_api(self, query_type, filters=None, keywords=""):
"""Query the Spotify Search API for the specified ``keywords``,
applying the provided ``filters``.
@@ -404,25 +413,20 @@ def _search_api(self, query_type, filters=None, keywords=''):
if no search results are returned.
:rtype: dict or None
"""
- query = self._construct_search_query(
- keywords=keywords, filters=filters
- )
+ query = self._construct_search_query(keywords=keywords, filters=filters)
if not query:
return None
- self._log.debug(
- f"Searching {self.data_source} for '{query}'"
- )
+ self._log.debug(f"Searching {self.data_source} for '{query}'")
try:
response = self._handle_response(
requests.get,
self.search_url,
- params={'q': query, 'type': query_type},
+ params={"q": query, "type": query_type},
)
except SpotifyAPIError as e:
- self._log.debug('Spotify API error: {}', e)
+ self._log.debug("Spotify API error: {}", e)
return []
- response_data = (response.get(query_type + 's', {})
- .get('items', []))
+ response_data = response.get(query_type + "s", {}).get("items", [])
self._log.debug(
"Found {} result(s) from {} for '{}'",
len(response_data),
@@ -440,33 +444,37 @@ def queries(lib, opts, args):
self._output_match_results(results)
spotify_cmd = ui.Subcommand(
- 'spotify', help=f'build a {self.data_source} playlist'
+ "spotify", help=f"build a {self.data_source} playlist"
)
spotify_cmd.parser.add_option(
- '-m',
- '--mode',
- action='store',
+ "-m",
+ "--mode",
+ action="store",
help='"open" to open {} with playlist, '
'"list" to print (default)'.format(self.data_source),
)
spotify_cmd.parser.add_option(
- '-f',
- '--show-failures',
- action='store_true',
- dest='show_failures',
- help='list tracks that did not match a {} ID'.format(
+ "-f",
+ "--show-failures",
+ action="store_true",
+ dest="show_failures",
+ help="list tracks that did not match a {} ID".format(
self.data_source
),
)
spotify_cmd.func = queries
# spotifysync command
- sync_cmd = ui.Subcommand('spotifysync',
- help="fetch track attributes from Spotify")
+ sync_cmd = ui.Subcommand(
+ "spotifysync", help="fetch track attributes from Spotify"
+ )
sync_cmd.parser.add_option(
- '-f', '--force', dest='force_refetch',
- action='store_true', default=False,
- help='re-download data when already present'
+ "-f",
+ "--force",
+ dest="force_refetch",
+ action="store_true",
+ default=False,
+ help="re-download data when already present",
)
def func(lib, opts, args):
@@ -478,14 +486,14 @@ def func(lib, opts, args):
def _parse_opts(self, opts):
if opts.mode:
- self.config['mode'].set(opts.mode)
+ self.config["mode"].set(opts.mode)
if opts.show_failures:
- self.config['show_failures'].set(True)
+ self.config["show_failures"].set(True)
- if self.config['mode'].get() not in ['list', 'open']:
+ if self.config["mode"].get() not in ["list", "open"]:
self._log.warning(
- '{0} is not a valid mode', self.config['mode'].get()
+ "{0} is not a valid mode", self.config["mode"].get()
)
return False
@@ -511,37 +519,37 @@ def _match_library_tracks(self, library, keywords):
if not items:
self._log.debug(
- 'Your beets query returned no items, skipping {}.',
+ "Your beets query returned no items, skipping {}.",
self.data_source,
)
return
- self._log.info('Processing {} tracks...', len(items))
+ self._log.info("Processing {} tracks...", len(items))
for item in items:
# Apply regex transformations if provided
- for regex in self.config['regex'].get():
+ for regex in self.config["regex"].get():
if (
- not regex['field']
- or not regex['search']
- or not regex['replace']
+ not regex["field"]
+ or not regex["search"]
+ or not regex["replace"]
):
continue
- value = item[regex['field']]
- item[regex['field']] = re.sub(
- regex['search'], regex['replace'], value
+ value = item[regex["field"]]
+ item[regex["field"]] = re.sub(
+ regex["search"], regex["replace"], value
)
# Custom values can be passed in the config (just in case)
- artist = item[self.config['artist_field'].get()]
- album = item[self.config['album_field'].get()]
- keywords = item[self.config['track_field'].get()]
+ artist = item[self.config["artist_field"].get()]
+ album = item[self.config["album_field"].get()]
+ keywords = item[self.config["track_field"].get()]
# Query the Web API for each track, look for the items' JSON data
- query_filters = {'artist': artist, 'album': album}
+ query_filters = {"artist": artist, "album": album}
response_data_tracks = self._search_api(
- query_type='track', keywords=keywords, filters=query_filters
+ query_type="track", keywords=keywords, filters=query_filters
)
if not response_data_tracks:
query = self._construct_search_query(
@@ -551,20 +559,20 @@ def _match_library_tracks(self, library, keywords):
continue
# Apply market filter if requested
- region_filter = self.config['region_filter'].get()
+ region_filter = self.config["region_filter"].get()
if region_filter:
response_data_tracks = [
track_data
for track_data in response_data_tracks
- if region_filter in track_data['available_markets']
+ if region_filter in track_data["available_markets"]
]
if (
len(response_data_tracks) == 1
- or self.config['tiebreak'].get() == 'first'
+ or self.config["tiebreak"].get() == "first"
):
self._log.debug(
- '{} track(s) found, count: {}',
+ "{} track(s) found, count: {}",
self.data_source,
len(response_data_tracks),
)
@@ -572,29 +580,29 @@ def _match_library_tracks(self, library, keywords):
else:
# Use the popularity filter
self._log.debug(
- 'Most popular track chosen, count: {}',
+ "Most popular track chosen, count: {}",
len(response_data_tracks),
)
chosen_result = max(
- response_data_tracks, key=lambda x: x['popularity']
+ response_data_tracks, key=lambda x: x["popularity"]
)
results.append(chosen_result)
failure_count = len(failures)
if failure_count > 0:
- if self.config['show_failures'].get():
+ if self.config["show_failures"].get():
self._log.info(
- '{} track(s) did not match a {} ID:',
+ "{} track(s) did not match a {} ID:",
failure_count,
self.data_source,
)
for track in failures:
- self._log.info('track: {}', track)
- self._log.info('')
+ self._log.info("track: {}", track)
+ self._log.info("")
else:
self._log.warning(
- '{} track(s) did not match a {} ID:\n'
- 'use --show-failures to display',
+ "{} track(s) did not match a {} ID:\n"
+ "use --show-failures to display",
failure_count,
self.data_source,
)
@@ -610,14 +618,14 @@ def _output_match_results(self, results):
:type results: list[dict]
"""
if results:
- spotify_ids = [track_data['id'] for track_data in results]
- if self.config['mode'].get() == 'open':
+ spotify_ids = [track_data["id"] for track_data in results]
+ if self.config["mode"].get() == "open":
self._log.info(
- 'Attempting to open {} with playlist'.format(
+ "Attempting to open {} with playlist".format(
self.data_source
)
)
- spotify_url = 'spotify:trackset:Playlist:' + ','.join(
+ spotify_url = "spotify:trackset:Playlist:" + ",".join(
spotify_ids
)
webbrowser.open(spotify_url)
@@ -626,42 +634,42 @@ def _output_match_results(self, results):
print(self.open_track_url + spotify_id)
else:
self._log.warning(
- f'No {self.data_source} tracks found from beets query'
+ f"No {self.data_source} tracks found from beets query"
)
def _fetch_info(self, items, write, force):
"""Obtain track information from Spotify."""
- self._log.debug('Total {} tracks', len(items))
+ self._log.debug("Total {} tracks", len(items))
for index, item in enumerate(items, start=1):
- self._log.info('Processing {}/{} tracks - {} ',
- index, len(items), item)
+ self._log.info(
+ "Processing {}/{} tracks - {} ", index, len(items), item
+ )
# If we're not forcing re-downloading for all tracks, check
# whether the popularity data is already present
if not force:
- if 'spotify_track_popularity' in item:
- self._log.debug('Popularity already present for: {}',
- item)
+ if "spotify_track_popularity" in item:
+ self._log.debug("Popularity already present for: {}", item)
continue
try:
spotify_track_id = item.spotify_track_id
except AttributeError:
- self._log.debug('No track_id present for: {}', item)
+ self._log.debug("No track_id present for: {}", item)
continue
popularity = self.track_popularity(spotify_track_id)
- item['spotify_track_popularity'] = popularity
- audio_features = \
- self.track_audio_features(spotify_track_id)
+ item["spotify_track_popularity"] = popularity
+ audio_features = self.track_audio_features(spotify_track_id)
if audio_features is None:
- self._log.info('No audio features found for: {}', item)
+ self._log.info("No audio features found for: {}", item)
continue
for feature in audio_features.keys():
if feature in self.spotify_audio_features.keys():
- item[self.spotify_audio_features[feature]] = \
- audio_features[feature]
- item['spotify_updated'] = time.time()
+ item[self.spotify_audio_features[feature]] = audio_features[
+ feature
+ ]
+ item["spotify_updated"] = time.time()
item.store()
if write:
item.try_write()
@@ -671,14 +679,15 @@ def track_popularity(self, track_id=None):
track_data = self._handle_response(
requests.get, self.track_url + track_id
)
- self._log.debug('track_data: {}', track_data.get('popularity'))
- return track_data.get('popularity')
+ self._log.debug("track_data: {}", track_data.get("popularity"))
+ return track_data.get("popularity")
def track_audio_features(self, track_id=None):
"""Fetch track audio features by its Spotify ID."""
try:
return self._handle_response(
- requests.get, self.audio_features_url + track_id)
+ requests.get, self.audio_features_url + track_id
+ )
except SpotifyAPIError as e:
- self._log.debug('Spotify API error: {}', e)
+ self._log.debug("Spotify API error: {}", e)
return None
diff --git a/beetsplug/subsonicplaylist.py b/beetsplug/subsonicplaylist.py
index ead78919e2..a603689162 100644
--- a/beetsplug/subsonicplaylist.py
+++ b/beetsplug/subsonicplaylist.py
@@ -15,9 +15,9 @@
import random
import string
-from xml.etree import ElementTree
from hashlib import md5
from urllib.parse import urlencode
+from xml.etree import ElementTree
import requests
@@ -26,7 +26,7 @@
from beets.plugins import BeetsPlugin
from beets.ui import Subcommand
-__author__ = 'https://github.com/MrNuggelz'
+__author__ = "https://github.com/MrNuggelz"
def filter_to_be_removed(items, keys):
@@ -34,17 +34,22 @@ def filter_to_be_removed(items, keys):
dont_remove = []
for artist, album, title in keys:
for item in items:
- if artist == item['artist'] and \
- album == item['album'] and \
- title == item['title']:
+ if (
+ artist == item["artist"]
+ and album == item["album"]
+ and title == item["title"]
+ ):
dont_remove.append(item)
return [item for item in items if item not in dont_remove]
else:
+
def to_be_removed(item):
for artist, album, title in keys:
- if artist == item['artist'] and\
- album == item['album'] and\
- title == item['title']:
+ if (
+ artist == item["artist"]
+ and album == item["album"]
+ and title == item["title"]
+ ):
return False
return True
@@ -52,111 +57,120 @@ def to_be_removed(item):
class SubsonicPlaylistPlugin(BeetsPlugin):
-
def __init__(self):
super().__init__()
self.config.add(
{
- 'delete': False,
- 'playlist_ids': [],
- 'playlist_names': [],
- 'username': '',
- 'password': ''
+ "delete": False,
+ "playlist_ids": [],
+ "playlist_names": [],
+ "username": "",
+ "password": "",
}
)
- self.config['password'].redact = True
+ self.config["password"].redact = True
def update_tags(self, playlist_dict, lib):
with lib.transaction():
for query, playlist_tag in playlist_dict.items():
- query = AndQuery([MatchQuery("artist", query[0]),
- MatchQuery("album", query[1]),
- MatchQuery("title", query[2])])
+ query = AndQuery(
+ [
+ MatchQuery("artist", query[0]),
+ MatchQuery("album", query[1]),
+ MatchQuery("title", query[2]),
+ ]
+ )
items = lib.items(query)
if not items:
- self._log.warn("{} | track not found ({})", playlist_tag,
- query)
+ self._log.warn(
+ "{} | track not found ({})", playlist_tag, query
+ )
continue
for item in items:
item.subsonic_playlist = playlist_tag
item.try_sync(write=True, move=False)
def get_playlist(self, playlist_id):
- xml = self.send('getPlaylist', {'id': playlist_id}).text
+ xml = self.send("getPlaylist", {"id": playlist_id}).text
playlist = ElementTree.fromstring(xml)[0]
- if playlist.attrib.get('code', '200') != '200':
- alt_error = 'error getting playlist, but no error message found'
- self._log.warn(playlist.attrib.get('message', alt_error))
+ if playlist.attrib.get("code", "200") != "200":
+ alt_error = "error getting playlist, but no error message found"
+ self._log.warn(playlist.attrib.get("message", alt_error))
return
- name = playlist.attrib.get('name', 'undefined')
- tracks = [(t.attrib['artist'], t.attrib['album'], t.attrib['title'])
- for t in playlist]
+ name = playlist.attrib.get("name", "undefined")
+ tracks = [
+ (t.attrib["artist"], t.attrib["album"], t.attrib["title"])
+ for t in playlist
+ ]
return name, tracks
def commands(self):
def build_playlist(lib, opts, args):
self.config.set_args(opts)
- ids = self.config['playlist_ids'].as_str_seq()
- if self.config['playlist_names'].as_str_seq():
+ ids = self.config["playlist_ids"].as_str_seq()
+ if self.config["playlist_names"].as_str_seq():
playlists = ElementTree.fromstring(
- self.send('getPlaylists').text)[0]
- if playlists.attrib.get('code', '200') != '200':
- alt_error = 'error getting playlists,' \
- ' but no error message found'
- self._log.warn(
- playlists.attrib.get('message', alt_error))
+ self.send("getPlaylists").text
+ )[0]
+ if playlists.attrib.get("code", "200") != "200":
+ alt_error = (
+ "error getting playlists," " but no error message found"
+ )
+ self._log.warn(playlists.attrib.get("message", alt_error))
return
- for name in self.config['playlist_names'].as_str_seq():
+ for name in self.config["playlist_names"].as_str_seq():
for playlist in playlists:
- if name == playlist.attrib['name']:
- ids.append(playlist.attrib['id'])
+ if name == playlist.attrib["name"]:
+ ids.append(playlist.attrib["id"])
playlist_dict = self.get_playlists(ids)
# delete old tags
- if self.config['delete']:
+ if self.config["delete"]:
existing = list(lib.items('subsonic_playlist:";"'))
to_be_removed = filter_to_be_removed(
- existing,
- playlist_dict.keys())
+ existing, playlist_dict.keys()
+ )
for item in to_be_removed:
- item['subsonic_playlist'] = ''
+ item["subsonic_playlist"] = ""
with lib.transaction():
item.try_sync(write=True, move=False)
self.update_tags(playlist_dict, lib)
subsonicplaylist_cmds = Subcommand(
- 'subsonicplaylist', help='import a subsonic playlist'
+ "subsonicplaylist", help="import a subsonic playlist"
)
subsonicplaylist_cmds.parser.add_option(
- '-d',
- '--delete',
- action='store_true',
- help='delete tag from items not in any playlist anymore',
+ "-d",
+ "--delete",
+ action="store_true",
+ help="delete tag from items not in any playlist anymore",
)
subsonicplaylist_cmds.func = build_playlist
return [subsonicplaylist_cmds]
def generate_token(self):
- salt = ''.join(random.choices(string.ascii_lowercase + string.digits))
- return md5(
- (self.config['password'].get() + salt).encode()).hexdigest(), salt
+ salt = "".join(random.choices(string.ascii_lowercase + string.digits))
+ return (
+ md5((self.config["password"].get() + salt).encode()).hexdigest(),
+ salt,
+ )
def send(self, endpoint, params=None):
if params is None:
params = {}
a, b = self.generate_token()
- params['u'] = self.config['username']
- params['t'] = a
- params['s'] = b
- params['v'] = '1.12.0'
- params['c'] = 'beets'
- resp = requests.get('{}/rest/{}?{}'.format(
- self.config['base_url'].get(),
- endpoint,
- urlencode(params))
+ params["u"] = self.config["username"]
+ params["t"] = a
+ params["s"] = b
+ params["v"] = "1.12.0"
+ params["c"] = "beets"
+ resp = requests.get(
+ "{}/rest/{}?{}".format(
+ self.config["base_url"].get(), endpoint, urlencode(params)
+ )
)
return resp
@@ -166,6 +180,6 @@ def get_playlists(self, ids):
name, tracks = self.get_playlist(playlist_id)
for track in tracks:
if track not in output:
- output[track] = ';'
- output[track] += name + ';'
+ output[track] = ";"
+ output[track] += name + ";"
return output
diff --git a/beetsplug/subsonicupdate.py b/beetsplug/subsonicupdate.py
index 4e32f4d303..0c8446c980 100644
--- a/beetsplug/subsonicupdate.py
+++ b/beetsplug/subsonicupdate.py
@@ -32,35 +32,37 @@
import hashlib
import random
import string
+from binascii import hexlify
import requests
-from binascii import hexlify
from beets import config
from beets.plugins import BeetsPlugin
-__author__ = 'https://github.com/maffo999'
+__author__ = "https://github.com/maffo999"
class SubsonicUpdate(BeetsPlugin):
def __init__(self):
super().__init__()
# Set default configuration values
- config['subsonic'].add({
- 'user': 'admin',
- 'pass': 'admin',
- 'url': 'http://localhost:4040',
- 'auth': 'token',
- })
- config['subsonic']['pass'].redact = True
- self.register_listener('database_change', self.db_change)
- self.register_listener('smartplaylist_update', self.spl_update)
+ config["subsonic"].add(
+ {
+ "user": "admin",
+ "pass": "admin",
+ "url": "http://localhost:4040",
+ "auth": "token",
+ }
+ )
+ config["subsonic"]["pass"].redact = True
+ self.register_listener("database_change", self.db_change)
+ self.register_listener("smartplaylist_update", self.spl_update)
def db_change(self, lib, model):
- self.register_listener('cli_exit', self.start_scan)
+ self.register_listener("cli_exit", self.start_scan)
def spl_update(self):
- self.register_listener('cli_exit', self.start_scan)
+ self.register_listener("cli_exit", self.start_scan)
@staticmethod
def __create_token():
@@ -68,13 +70,13 @@ def __create_token():
:return: The generated salt and hashed token
"""
- password = config['subsonic']['pass'].as_str()
+ password = config["subsonic"]["pass"].as_str()
# Pick the random sequence and salt the password
r = string.ascii_letters + string.digits
salt = "".join([random.choice(r) for _ in range(6)])
salted_password = password + salt
- token = hashlib.md5(salted_password.encode('utf-8')).hexdigest()
+ token = hashlib.md5(salted_password.encode("utf-8")).hexdigest()
# Put together the payload of the request to the server and the URL
return salt, token
@@ -88,47 +90,47 @@ def __format_url(endpoint):
:return: Endpoint for updating Subsonic
"""
- url = config['subsonic']['url'].as_str()
- if url and url.endswith('/'):
+ url = config["subsonic"]["url"].as_str()
+ if url and url.endswith("/"):
url = url[:-1]
# @deprecated("Use url config option instead")
if not url:
- host = config['subsonic']['host'].as_str()
- port = config['subsonic']['port'].get(int)
- context_path = config['subsonic']['contextpath'].as_str()
- if context_path == '/':
- context_path = ''
+ host = config["subsonic"]["host"].as_str()
+ port = config["subsonic"]["port"].get(int)
+ context_path = config["subsonic"]["contextpath"].as_str()
+ if context_path == "/":
+ context_path = ""
url = f"http://{host}:{port}{context_path}"
- return url + f'/rest/{endpoint}'
+ return url + f"/rest/{endpoint}"
def start_scan(self):
- user = config['subsonic']['user'].as_str()
- auth = config['subsonic']['auth'].as_str()
+ user = config["subsonic"]["user"].as_str()
+ auth = config["subsonic"]["auth"].as_str()
url = self.__format_url("startScan")
- self._log.debug('URL is {0}', url)
- self._log.debug('auth type is {0}', config['subsonic']['auth'])
+ self._log.debug("URL is {0}", url)
+ self._log.debug("auth type is {0}", config["subsonic"]["auth"])
if auth == "token":
salt, token = self.__create_token()
payload = {
- 'u': user,
- 't': token,
- 's': salt,
- 'v': '1.13.0', # Subsonic 5.3 and newer
- 'c': 'beets',
- 'f': 'json'
+ "u": user,
+ "t": token,
+ "s": salt,
+ "v": "1.13.0", # Subsonic 5.3 and newer
+ "c": "beets",
+ "f": "json",
}
elif auth == "password":
- password = config['subsonic']['pass'].as_str()
+ password = config["subsonic"]["pass"].as_str()
encpass = hexlify(password.encode()).decode()
payload = {
- 'u': user,
- 'p': f'enc:{encpass}',
- 'v': '1.12.0',
- 'c': 'beets',
- 'f': 'json'
+ "u": user,
+ "p": f"enc:{encpass}",
+ "v": "1.12.0",
+ "c": "beets",
+ "f": "json",
}
else:
return
@@ -136,16 +138,19 @@ def start_scan(self):
response = requests.get(url, params=payload)
json = response.json()
- if response.status_code == 200 and \
- json['subsonic-response']['status'] == "ok":
- count = json['subsonic-response']['scanStatus']['count']
- self._log.info(
- f'Updating Subsonic; scanning {count} tracks')
- elif response.status_code == 200 and \
- json['subsonic-response']['status'] == "failed":
- error_message = json['subsonic-response']['error']['message']
- self._log.error(f'Error: {error_message}')
+ if (
+ response.status_code == 200
+ and json["subsonic-response"]["status"] == "ok"
+ ):
+ count = json["subsonic-response"]["scanStatus"]["count"]
+ self._log.info(f"Updating Subsonic; scanning {count} tracks")
+ elif (
+ response.status_code == 200
+ and json["subsonic-response"]["status"] == "failed"
+ ):
+ error_message = json["subsonic-response"]["error"]["message"]
+ self._log.error(f"Error: {error_message}")
else:
- self._log.error('Error: {0}', json)
+ self._log.error("Error: {0}", json)
except Exception as error:
- self._log.error(f'Error: {error}')
+ self._log.error(f"Error: {error}")
diff --git a/beetsplug/substitute.py b/beetsplug/substitute.py
index 566dd974c4..87db2a45d0 100644
--- a/beetsplug/substitute.py
+++ b/beetsplug/substitute.py
@@ -18,6 +18,7 @@
"""
import re
+
from beets.plugins import BeetsPlugin
@@ -28,6 +29,7 @@ class Substitute(BeetsPlugin):
given substitution rules. ``rules`` must be a list of (pattern,
replacement) pairs.
"""
+
def tmpl_substitute(self, text):
"""Do the actual replacing."""
if text:
@@ -36,7 +38,7 @@ def tmpl_substitute(self, text):
return replacement
return text
else:
- return u''
+ return ""
def __init__(self):
"""Initialize the substitute plugin.
@@ -46,7 +48,7 @@ def __init__(self):
"""
super(Substitute, self).__init__()
self.substitute_rules = []
- self.template_funcs['substitute'] = self.tmpl_substitute
+ self.template_funcs["substitute"] = self.tmpl_substitute
for key, view in self.config.items():
value = view.as_str()
diff --git a/beetsplug/the.py b/beetsplug/the.py
index e6626d2b2d..2deab9cd50 100644
--- a/beetsplug/the.py
+++ b/beetsplug/the.py
@@ -16,50 +16,54 @@
import re
+
from beets.plugins import BeetsPlugin
-__author__ = 'baobab@heresiarch.info'
-__version__ = '1.1'
+__author__ = "baobab@heresiarch.info"
+__version__ = "1.1"
-PATTERN_THE = '^the\\s'
-PATTERN_A = '^[a][n]?\\s'
-FORMAT = '{0}, {1}'
+PATTERN_THE = "^the\\s"
+PATTERN_A = "^[a][n]?\\s"
+FORMAT = "{0}, {1}"
class ThePlugin(BeetsPlugin):
-
patterns = []
def __init__(self):
super().__init__()
- self.template_funcs['the'] = self.the_template_func
+ self.template_funcs["the"] = self.the_template_func
- self.config.add({
- 'the': True,
- 'a': True,
- 'format': '{0}, {1}',
- 'strip': False,
- 'patterns': [],
- })
+ self.config.add(
+ {
+ "the": True,
+ "a": True,
+ "format": "{0}, {1}",
+ "strip": False,
+ "patterns": [],
+ }
+ )
- self.patterns = self.config['patterns'].as_str_seq()
+ self.patterns = self.config["patterns"].as_str_seq()
for p in self.patterns:
if p:
try:
re.compile(p)
except re.error:
- self._log.error('invalid pattern: {0}', p)
+ self._log.error("invalid pattern: {0}", p)
else:
- if not (p.startswith('^') or p.endswith('$')):
- self._log.warning('warning: \"{0}\" will not '
- 'match string start/end', p)
- if self.config['a']:
+ if not (p.startswith("^") or p.endswith("$")):
+ self._log.warning(
+ 'warning: "{0}" will not ' "match string start/end",
+ p,
+ )
+ if self.config["a"]:
self.patterns = [PATTERN_A] + self.patterns
- if self.config['the']:
+ if self.config["the"]:
self.patterns = [PATTERN_THE] + self.patterns
if not self.patterns:
- self._log.warning('no patterns defined!')
+ self._log.warning("no patterns defined!")
def unthe(self, text, pattern):
"""Moves pattern in the path format string or strips it
@@ -75,14 +79,14 @@ def unthe(self, text, pattern):
except IndexError:
return text
else:
- r = re.sub(r, '', text).strip()
- if self.config['strip']:
+ r = re.sub(r, "", text).strip()
+ if self.config["strip"]:
return r
else:
- fmt = self.config['format'].as_str()
+ fmt = self.config["format"].as_str()
return fmt.format(r, t.strip()).strip()
else:
- return ''
+ return ""
def the_template_func(self, text):
if not self.patterns:
@@ -91,8 +95,8 @@ def the_template_func(self, text):
for p in self.patterns:
r = self.unthe(text, p)
if r != text:
- self._log.debug('\"{0}\" -> \"{1}\"', text, r)
+ self._log.debug('"{0}" -> "{1}"', text, r)
break
return r
else:
- return ''
+ return ""
diff --git a/beetsplug/thumbnails.py b/beetsplug/thumbnails.py
index cd771034db..3766d69d09 100644
--- a/beetsplug/thumbnails.py
+++ b/beetsplug/thumbnails.py
@@ -19,22 +19,21 @@
"""
-from hashlib import md5
+import ctypes
+import ctypes.util
import os
import shutil
+from hashlib import md5
from pathlib import PurePosixPath
-import ctypes
-import ctypes.util
from xdg import BaseDirectory
+from beets import util
from beets.plugins import BeetsPlugin
from beets.ui import Subcommand, decargs
-from beets import util
from beets.util import bytestring_path, displayable_path, syspath
from beets.util.artresizer import ArtResizer
-
BASE_DIR = os.path.join(BaseDirectory.xdg_cache_home, "thumbnails")
NORMAL_DIR = bytestring_path(os.path.join(BASE_DIR, "normal"))
LARGE_DIR = bytestring_path(os.path.join(BASE_DIR, "large"))
@@ -43,26 +42,37 @@
class ThumbnailsPlugin(BeetsPlugin):
def __init__(self):
super().__init__()
- self.config.add({
- 'auto': True,
- 'force': False,
- 'dolphin': False,
- })
+ self.config.add(
+ {
+ "auto": True,
+ "force": False,
+ "dolphin": False,
+ }
+ )
- if self.config['auto'] and self._check_local_ok():
- self.register_listener('art_set', self.process_album)
+ if self.config["auto"] and self._check_local_ok():
+ self.register_listener("art_set", self.process_album)
def commands(self):
- thumbnails_command = Subcommand("thumbnails",
- help="Create album thumbnails")
+ thumbnails_command = Subcommand(
+ "thumbnails", help="Create album thumbnails"
+ )
thumbnails_command.parser.add_option(
- '-f', '--force',
- dest='force', action='store_true', default=False,
- help='force regeneration of thumbnails deemed fine (existing & '
- 'recent enough)')
+ "-f",
+ "--force",
+ dest="force",
+ action="store_true",
+ default=False,
+ help="force regeneration of thumbnails deemed fine (existing & "
+ "recent enough)",
+ )
thumbnails_command.parser.add_option(
- '--dolphin', dest='dolphin', action='store_true', default=False,
- help="create Dolphin-compatible thumbnail information (for KDE)")
+ "--dolphin",
+ dest="dolphin",
+ action="store_true",
+ default=False,
+ help="create Dolphin-compatible thumbnail information (for KDE)",
+ )
thumbnails_command.func = self.process_query
return [thumbnails_command]
@@ -75,14 +85,16 @@ def process_query(self, lib, opts, args):
def _check_local_ok(self):
"""Check that everything is ready:
- - local capability to resize images
- - thumbnail dirs exist (create them if needed)
- - detect whether we'll use PIL or IM
- - detect whether we'll use GIO or Python to get URIs
+ - local capability to resize images
+ - thumbnail dirs exist (create them if needed)
+ - detect whether we'll use PIL or IM
+ - detect whether we'll use GIO or Python to get URIs
"""
if not ArtResizer.shared.local:
- self._log.warning("No local image resizing capabilities, "
- "cannot generate thumbnails")
+ self._log.warning(
+ "No local image resizing capabilities, "
+ "cannot generate thumbnails"
+ )
return False
for dir in (NORMAL_DIR, LARGE_DIR):
@@ -105,20 +117,20 @@ def _check_local_ok(self):
return True
def process_album(self, album):
- """Produce thumbnails for the album folder.
- """
- self._log.debug('generating thumbnail for {0}', album)
+ """Produce thumbnails for the album folder."""
+ self._log.debug("generating thumbnail for {0}", album)
if not album.artpath:
- self._log.info('album {0} has no art', album)
+ self._log.info("album {0} has no art", album)
return
- if self.config['dolphin']:
+ if self.config["dolphin"]:
self.make_dolphin_cover_thumbnail(album)
size = ArtResizer.shared.get_size(album.artpath)
if not size:
- self._log.warning('problem getting the picture size for {0}',
- album.artpath)
+ self._log.warning(
+ "problem getting the picture size for {0}", album.artpath
+ )
return
wrote = True
@@ -127,9 +139,9 @@ def process_album(self, album):
wrote &= self.make_cover_thumbnail(album, 128, NORMAL_DIR)
if wrote:
- self._log.info('wrote thumbnail for {0}', album)
+ self._log.info("wrote thumbnail for {0}", album)
else:
- self._log.info('nothing to do for {0}', album)
+ self._log.info("nothing to do for {0}", album)
def make_cover_thumbnail(self, album, size, target_dir):
"""Make a thumbnail of given size for `album` and put it in
@@ -137,15 +149,24 @@ def make_cover_thumbnail(self, album, size, target_dir):
"""
target = os.path.join(target_dir, self.thumbnail_file_name(album.path))
- if (os.path.exists(syspath(target))
- and os.stat(syspath(target)).st_mtime
- > os.stat(syspath(album.artpath)).st_mtime):
- if self.config['force']:
- self._log.debug("found a suitable {1}x{1} thumbnail for {0}, "
- "forcing regeneration", album, size)
+ if (
+ os.path.exists(syspath(target))
+ and os.stat(syspath(target)).st_mtime
+ > os.stat(syspath(album.artpath)).st_mtime
+ ):
+ if self.config["force"]:
+ self._log.debug(
+ "found a suitable {1}x{1} thumbnail for {0}, "
+ "forcing regeneration",
+ album,
+ size,
+ )
else:
- self._log.debug("{1}x{1} thumbnail for {0} exists and is "
- "recent enough", album, size)
+ self._log.debug(
+ "{1}x{1} thumbnail for {0} exists and is " "recent enough",
+ album,
+ size,
+ )
return False
resized = ArtResizer.shared.resize(size, album.artpath, target)
self.add_tags(album, resized)
@@ -157,7 +178,7 @@ def thumbnail_file_name(self, path):
See https://standards.freedesktop.org/thumbnail-spec/latest/x227.html
"""
uri = self.get_uri(path)
- hash = md5(uri.encode('utf-8')).hexdigest()
+ hash = md5(uri.encode("utf-8")).hexdigest()
return bytestring_path(f"{hash}.png")
def add_tags(self, album, image_path):
@@ -165,22 +186,25 @@ def add_tags(self, album, image_path):
See https://standards.freedesktop.org/thumbnail-spec/latest/x142.html
"""
mtime = os.stat(syspath(album.artpath)).st_mtime
- metadata = {"Thumb::URI": self.get_uri(album.artpath),
- "Thumb::MTime": str(mtime)}
+ metadata = {
+ "Thumb::URI": self.get_uri(album.artpath),
+ "Thumb::MTime": str(mtime),
+ }
try:
ArtResizer.shared.write_metadata(image_path, metadata)
except Exception:
- self._log.exception("could not write metadata to {0}",
- displayable_path(image_path))
+ self._log.exception(
+ "could not write metadata to {0}", displayable_path(image_path)
+ )
def make_dolphin_cover_thumbnail(self, album):
outfilename = os.path.join(album.path, b".directory")
if os.path.exists(syspath(outfilename)):
return
artfile = os.path.split(album.artpath)[1]
- with open(syspath(outfilename), 'w') as f:
- f.write('[Desktop Entry]\n')
- f.write('Icon=./{}'.format(artfile.decode('utf-8')))
+ with open(syspath(outfilename), "w") as f:
+ f.write("[Desktop Entry]\n")
+ f.write("Icon=./{}".format(artfile.decode("utf-8")))
f.close()
self._log.debug("Wrote file {0}", displayable_path(outfilename))
@@ -209,12 +233,12 @@ def copy_c_string(c_string):
# work. A more surefire way would be to allocate a ctypes buffer and copy
# the data with `memcpy` or somesuch.
s = ctypes.cast(c_string, ctypes.c_char_p).value
- return b'' + s
+ return b"" + s
class GioURI(URIGetter):
- """Use gio URI function g_file_get_uri. Paths must be utf-8 encoded.
- """
+ """Use gio URI function g_file_get_uri. Paths must be utf-8 encoded."""
+
name = "GIO"
def __init__(self):
@@ -243,8 +267,11 @@ def get_library(self):
def uri(self, path):
g_file_ptr = self.libgio.g_file_new_for_path(path)
if not g_file_ptr:
- raise RuntimeError("No gfile pointer received for {}".format(
- displayable_path(path)))
+ raise RuntimeError(
+ "No gfile pointer received for {}".format(
+ displayable_path(path)
+ )
+ )
try:
uri_ptr = self.libgio.g_file_get_uri(g_file_ptr)
@@ -252,8 +279,10 @@ def uri(self, path):
self.libgio.g_object_unref(g_file_ptr)
if not uri_ptr:
self.libgio.g_free(uri_ptr)
- raise RuntimeError("No URI received from the gfile pointer for "
- "{}".format(displayable_path(path)))
+ raise RuntimeError(
+ "No URI received from the gfile pointer for "
+ "{}".format(displayable_path(path))
+ )
try:
uri = copy_c_string(uri_ptr)
@@ -263,6 +292,4 @@ def uri(self, path):
try:
return uri.decode(util._fsencoding())
except UnicodeDecodeError:
- raise RuntimeError(
- f"Could not decode filename from GIO: {uri!r}"
- )
+ raise RuntimeError(f"Could not decode filename from GIO: {uri!r}")
diff --git a/beetsplug/types.py b/beetsplug/types.py
index 930d5e869f..9ba3aac663 100644
--- a/beetsplug/types.py
+++ b/beetsplug/types.py
@@ -13,14 +13,14 @@
# included in all copies or substantial portions of the Software.
-from beets.plugins import BeetsPlugin
-from beets.dbcore import types
from confuse import ConfigValueError
+
from beets import library
+from beets.dbcore import types
+from beets.plugins import BeetsPlugin
class TypesPlugin(BeetsPlugin):
-
@property
def item_types(self):
return self._types()
@@ -35,16 +35,16 @@ def _types(self):
mytypes = {}
for key, value in self.config.items():
- if value.get() == 'int':
+ if value.get() == "int":
mytypes[key] = types.INTEGER
- elif value.get() == 'float':
+ elif value.get() == "float":
mytypes[key] = types.FLOAT
- elif value.get() == 'bool':
+ elif value.get() == "bool":
mytypes[key] = types.BOOLEAN
- elif value.get() == 'date':
+ elif value.get() == "date":
mytypes[key] = library.DateType()
else:
raise ConfigValueError(
- "unknown type '{}' for the '{}' field"
- .format(value, key))
+ "unknown type '{}' for the '{}' field".format(value, key)
+ )
return mytypes
diff --git a/beetsplug/unimported.py b/beetsplug/unimported.py
index 4a238531d9..278fd667cb 100644
--- a/beetsplug/unimported.py
+++ b/beetsplug/unimported.py
@@ -23,24 +23,18 @@
from beets.plugins import BeetsPlugin
from beets.ui import Subcommand, print_
-__author__ = 'https://github.com/MrNuggelz'
+__author__ = "https://github.com/MrNuggelz"
class Unimported(BeetsPlugin):
-
def __init__(self):
super().__init__()
- self.config.add(
- {
- 'ignore_extensions': [],
- 'ignore_subdirectories': []
- }
- )
+ self.config.add({"ignore_extensions": [], "ignore_subdirectories": []})
def commands(self):
def print_unimported(lib, opts, args):
ignore_exts = [
- ('.' + x).encode()
+ ("." + x).encode()
for x in self.config["ignore_extensions"].as_str_seq()
]
ignore_dirs = [
@@ -62,8 +56,9 @@ def print_unimported(lib, opts, args):
print_(util.displayable_path(f))
unimported = Subcommand(
- 'unimported',
- help='list all files in the library folder which are not listed'
- ' in the beets library database')
+ "unimported",
+ help="list all files in the library folder which are not listed"
+ " in the beets library database",
+ )
unimported.func = print_unimported
return [unimported]
diff --git a/beetsplug/web/__init__.py b/beetsplug/web/__init__.py
index a5f26d8505..cebb0be0a5 100644
--- a/beetsplug/web/__init__.py
+++ b/beetsplug/web/__init__.py
@@ -14,21 +14,22 @@
"""A Web interface to beets."""
-from beets.plugins import BeetsPlugin
-from beets import ui
-from beets import util
-import beets.library
+import base64
+import json
+import os
+
import flask
from flask import g, jsonify
-from werkzeug.routing import BaseConverter, PathConverter
-import os
from unidecode import unidecode
-import json
-import base64
+from werkzeug.routing import BaseConverter, PathConverter
+import beets.library
+from beets import ui, util
+from beets.plugins import BeetsPlugin
# Utilities.
+
def _rep(obj, expand=False):
"""Get a flat -- i.e., JSON-ish -- representation of a beets Item or
Album object. For Albums, `expand` dictates whether tracks are
@@ -37,32 +38,32 @@ def _rep(obj, expand=False):
out = dict(obj)
if isinstance(obj, beets.library.Item):
- if app.config.get('INCLUDE_PATHS', False):
- out['path'] = util.displayable_path(out['path'])
+ if app.config.get("INCLUDE_PATHS", False):
+ out["path"] = util.displayable_path(out["path"])
else:
- del out['path']
+ del out["path"]
# Filter all bytes attributes and convert them to strings.
for key, value in out.items():
if isinstance(out[key], bytes):
- out[key] = base64.b64encode(value).decode('ascii')
+ out[key] = base64.b64encode(value).decode("ascii")
# Get the size (in bytes) of the backing file. This is useful
# for the Tomahawk resolver API.
try:
- out['size'] = os.path.getsize(util.syspath(obj.path))
+ out["size"] = os.path.getsize(util.syspath(obj.path))
except OSError:
- out['size'] = 0
+ out["size"] = 0
return out
elif isinstance(obj, beets.library.Album):
- if app.config.get('INCLUDE_PATHS', False):
- out['artpath'] = util.displayable_path(out['artpath'])
+ if app.config.get("INCLUDE_PATHS", False):
+ out["artpath"] = util.displayable_path(out["artpath"])
else:
- del out['artpath']
+ del out["artpath"]
if expand:
- out['items'] = [_rep(item) for item in obj.items()]
+ out["items"] = [_rep(item) for item in obj.items()]
return out
@@ -81,15 +82,15 @@ def json_generator(items, root, expand=False):
if first:
first = False
else:
- yield ','
+ yield ","
yield json.dumps(_rep(item, expand=expand))
- yield ']}'
+ yield "]}"
def is_expand():
"""Returns whether the current request is for an expanded response."""
- return flask.request.args.get('expand') is not None
+ return flask.request.args.get("expand") is not None
def is_delete():
@@ -97,7 +98,7 @@ def is_delete():
files.
"""
- return flask.request.args.get('delete') is not None
+ return flask.request.args.get("delete") is not None
def get_method():
@@ -106,25 +107,24 @@ def get_method():
def resource(name, patchable=False):
- """Decorates a function to handle RESTful HTTP requests for a resource.
- """
+ """Decorates a function to handle RESTful HTTP requests for a resource."""
+
def make_responder(retriever):
def responder(ids):
entities = [retriever(id) for id in ids]
entities = [entity for entity in entities if entity]
if get_method() == "DELETE":
-
- if app.config.get('READONLY', True):
+ if app.config.get("READONLY", True):
return flask.abort(405)
for entity in entities:
entity.remove(delete=is_delete())
- return flask.make_response(jsonify({'deleted': True}), 200)
+ return flask.make_response(jsonify({"deleted": True}), 200)
elif get_method() == "PATCH" and patchable:
- if app.config.get('READONLY', True):
+ if app.config.get("READONLY", True):
return flask.abort(405)
for entity in entities:
@@ -136,7 +136,7 @@ def responder(ids):
elif entities:
return app.response_class(
json_generator(entities, root=name),
- mimetype='application/json'
+ mimetype="application/json",
)
elif get_method() == "GET":
@@ -145,7 +145,7 @@ def responder(ids):
elif entities:
return app.response_class(
json_generator(entities, root=name),
- mimetype='application/json'
+ mimetype="application/json",
)
else:
return flask.abort(404)
@@ -153,31 +153,31 @@ def responder(ids):
else:
return flask.abort(405)
- responder.__name__ = f'get_{name}'
+ responder.__name__ = f"get_{name}"
return responder
+
return make_responder
def resource_query(name, patchable=False):
- """Decorates a function to handle RESTful HTTP queries for resources.
- """
+ """Decorates a function to handle RESTful HTTP queries for resources."""
+
def make_responder(query_func):
def responder(queries):
entities = query_func(queries)
if get_method() == "DELETE":
-
- if app.config.get('READONLY', True):
+ if app.config.get("READONLY", True):
return flask.abort(405)
for entity in entities:
entity.remove(delete=is_delete())
- return flask.make_response(jsonify({'deleted': True}), 200)
+ return flask.make_response(jsonify({"deleted": True}), 200)
elif get_method() == "PATCH" and patchable:
- if app.config.get('READONLY', True):
+ if app.config.get("READONLY", True):
return flask.abort(405)
for entity in entities:
@@ -186,22 +186,21 @@ def responder(queries):
return app.response_class(
json_generator(entities, root=name),
- mimetype='application/json'
+ mimetype="application/json",
)
elif get_method() == "GET":
return app.response_class(
json_generator(
- entities,
- root='results', expand=is_expand()
+ entities, root="results", expand=is_expand()
),
- mimetype='application/json'
+ mimetype="application/json",
)
else:
return flask.abort(405)
- responder.__name__ = f'query_{name}'
+ responder.__name__ = f"query_{name}"
return responder
@@ -212,34 +211,39 @@ def resource_list(name):
"""Decorates a function to handle RESTful HTTP request for a list of
resources.
"""
+
def make_responder(list_all):
def responder():
return app.response_class(
json_generator(list_all(), root=name, expand=is_expand()),
- mimetype='application/json'
+ mimetype="application/json",
)
- responder.__name__ = f'all_{name}'
+
+ responder.__name__ = f"all_{name}"
return responder
+
return make_responder
def _get_unique_table_field_values(model, field, sort_field):
- """ retrieve all unique values belonging to a key from a model """
+ """retrieve all unique values belonging to a key from a model"""
if field not in model.all_keys() or sort_field not in model.all_keys():
raise KeyError
with g.lib.transaction() as tx:
- rows = tx.query('SELECT DISTINCT "{}" FROM "{}" ORDER BY "{}"'
- .format(field, model._table, sort_field))
+ rows = tx.query(
+ 'SELECT DISTINCT "{}" FROM "{}" ORDER BY "{}"'.format(
+ field, model._table, sort_field
+ )
+ )
return [row[0] for row in rows]
class IdListConverter(BaseConverter):
- """Converts comma separated lists of ids in urls to integer lists.
- """
+ """Converts comma separated lists of ids in urls to integer lists."""
def to_python(self, value):
ids = []
- for id in value.split(','):
+ for id in value.split(","):
try:
ids.append(int(id))
except ValueError:
@@ -247,63 +251,65 @@ def to_python(self, value):
return ids
def to_url(self, value):
- return ','.join(str(v) for v in value)
+ return ",".join(str(v) for v in value)
class QueryConverter(PathConverter):
- """Converts slash separated lists of queries in the url to string list.
- """
+ """Converts slash separated lists of queries in the url to string list."""
def to_python(self, value):
- queries = value.split('/')
+ queries = value.split("/")
"""Do not do path substitution on regex value tests"""
- return [query if '::' in query else query.replace('\\', os.sep)
- for query in queries]
+ return [
+ query if "::" in query else query.replace("\\", os.sep)
+ for query in queries
+ ]
def to_url(self, value):
- return '/'.join([v.replace(os.sep, '\\') for v in value])
+ return "/".join([v.replace(os.sep, "\\") for v in value])
class EverythingConverter(PathConverter):
part_isolating = False
- regex = '.*?'
+ regex = ".*?"
# Flask setup.
app = flask.Flask(__name__)
-app.url_map.converters['idlist'] = IdListConverter
-app.url_map.converters['query'] = QueryConverter
-app.url_map.converters['everything'] = EverythingConverter
+app.url_map.converters["idlist"] = IdListConverter
+app.url_map.converters["query"] = QueryConverter
+app.url_map.converters["everything"] = EverythingConverter
@app.before_request
def before_request():
- g.lib = app.config['lib']
+ g.lib = app.config["lib"]
# Items.
-@app.route('/item/', methods=["GET", "DELETE", "PATCH"])
-@resource('items', patchable=True)
+
+@app.route("/item/", methods=["GET", "DELETE", "PATCH"])
+@resource("items", patchable=True)
def get_item(id):
return g.lib.get_item(id)
-@app.route('/item/')
-@app.route('/item/query/')
-@resource_list('items')
+@app.route("/item/")
+@app.route("/item/query/")
+@resource_list("items")
def all_items():
return g.lib.items()
-@app.route('/item//file')
+@app.route("/item//file")
def item_file(item_id):
item = g.lib.get_item(item_id)
# On Windows under Python 2, Flask wants a Unicode path. On Python 3, it
# *always* wants a Unicode path.
- if os.name == 'nt':
+ if os.name == "nt":
item_path = util.syspath(item.path)
else:
item_path = util.py3_path(item.path)
@@ -328,23 +334,21 @@ def item_file(item_id):
safe_filename = unicode_base_filename
response = flask.send_file(
- item_path,
- as_attachment=True,
- download_name=safe_filename
+ item_path, as_attachment=True, download_name=safe_filename
)
- response.headers['Content-Length'] = os.path.getsize(item_path)
+ response.headers["Content-Length"] = os.path.getsize(item_path)
return response
-@app.route('/item/query/', methods=["GET", "DELETE", "PATCH"])
-@resource_query('items', patchable=True)
+@app.route("/item/query/", methods=["GET", "DELETE", "PATCH"])
+@resource_query("items", patchable=True)
def item_query(queries):
return g.lib.items(queries)
-@app.route('/item/path/')
+@app.route("/item/path/")
def item_at_path(path):
- query = beets.library.PathQuery('path', path.encode('utf-8'))
+ query = beets.library.PathQuery("path", path.encode("utf-8"))
item = g.lib.items(query).get()
if item:
return flask.jsonify(_rep(item))
@@ -352,12 +356,13 @@ def item_at_path(path):
return flask.abort(404)
-@app.route('/item/values/')
+@app.route("/item/values/")
def item_unique_field_values(key):
- sort_key = flask.request.args.get('sort_key', key)
+ sort_key = flask.request.args.get("sort_key", key)
try:
- values = _get_unique_table_field_values(beets.library.Item, key,
- sort_key)
+ values = _get_unique_table_field_values(
+ beets.library.Item, key, sort_key
+ )
except KeyError:
return flask.abort(404)
return flask.jsonify(values=values)
@@ -365,26 +370,27 @@ def item_unique_field_values(key):
# Albums.
-@app.route('/album/', methods=["GET", "DELETE"])
-@resource('albums')
+
+@app.route("/album/", methods=["GET", "DELETE"])
+@resource("albums")
def get_album(id):
return g.lib.get_album(id)
-@app.route('/album/')
-@app.route('/album/query/')
-@resource_list('albums')
+@app.route("/album/")
+@app.route("/album/query/")
+@resource_list("albums")
def all_albums():
return g.lib.albums()
-@app.route('/album/query/', methods=["GET", "DELETE"])
-@resource_query('albums')
+@app.route("/album/query/", methods=["GET", "DELETE"])
+@resource_query("albums")
def album_query(queries):
return g.lib.albums(queries)
-@app.route('/album//art')
+@app.route("/album//art")
def album_art(album_id):
album = g.lib.get_album(album_id)
if album and album.artpath:
@@ -393,12 +399,13 @@ def album_art(album_id):
return flask.abort(404)
-@app.route('/album/values/')
+@app.route("/album/values/")
def album_unique_field_values(key):
- sort_key = flask.request.args.get('sort_key', key)
+ sort_key = flask.request.args.get("sort_key", key)
try:
- values = _get_unique_table_field_values(beets.library.Album, key,
- sort_key)
+ values = _get_unique_table_field_values(
+ beets.library.Album, key, sort_key
+ )
except KeyError:
return flask.abort(404)
return flask.jsonify(values=values)
@@ -406,7 +413,8 @@ def album_unique_field_values(key):
# Artists.
-@app.route('/artist/')
+
+@app.route("/artist/")
def all_artists():
with g.lib.transaction() as tx:
rows = tx.query("SELECT DISTINCT albumartist FROM albums")
@@ -416,88 +424,106 @@ def all_artists():
# Library information.
-@app.route('/stats')
+
+@app.route("/stats")
def stats():
with g.lib.transaction() as tx:
item_rows = tx.query("SELECT COUNT(*) FROM items")
album_rows = tx.query("SELECT COUNT(*) FROM albums")
- return flask.jsonify({
- 'items': item_rows[0][0],
- 'albums': album_rows[0][0],
- })
+ return flask.jsonify(
+ {
+ "items": item_rows[0][0],
+ "albums": album_rows[0][0],
+ }
+ )
# UI.
-@app.route('/')
+
+@app.route("/")
def home():
- return flask.render_template('index.html')
+ return flask.render_template("index.html")
# Plugin hook.
+
class WebPlugin(BeetsPlugin):
def __init__(self):
super().__init__()
- self.config.add({
- 'host': '127.0.0.1',
- 'port': 8337,
- 'cors': '',
- 'cors_supports_credentials': False,
- 'reverse_proxy': False,
- 'include_paths': False,
- 'readonly': True,
- })
+ self.config.add(
+ {
+ "host": "127.0.0.1",
+ "port": 8337,
+ "cors": "",
+ "cors_supports_credentials": False,
+ "reverse_proxy": False,
+ "include_paths": False,
+ "readonly": True,
+ }
+ )
def commands(self):
- cmd = ui.Subcommand('web', help='start a Web interface')
- cmd.parser.add_option('-d', '--debug', action='store_true',
- default=False, help='debug mode')
+ cmd = ui.Subcommand("web", help="start a Web interface")
+ cmd.parser.add_option(
+ "-d",
+ "--debug",
+ action="store_true",
+ default=False,
+ help="debug mode",
+ )
def func(lib, opts, args):
args = ui.decargs(args)
if args:
- self.config['host'] = args.pop(0)
+ self.config["host"] = args.pop(0)
if args:
- self.config['port'] = int(args.pop(0))
+ self.config["port"] = int(args.pop(0))
- app.config['lib'] = lib
+ app.config["lib"] = lib
# Normalizes json output
- app.config['JSONIFY_PRETTYPRINT_REGULAR'] = False
+ app.config["JSONIFY_PRETTYPRINT_REGULAR"] = False
- app.config['INCLUDE_PATHS'] = self.config['include_paths']
- app.config['READONLY'] = self.config['readonly']
+ app.config["INCLUDE_PATHS"] = self.config["include_paths"]
+ app.config["READONLY"] = self.config["readonly"]
# Enable CORS if required.
- if self.config['cors']:
- self._log.info('Enabling CORS with origin: {0}',
- self.config['cors'])
+ if self.config["cors"]:
+ self._log.info(
+ "Enabling CORS with origin: {0}", self.config["cors"]
+ )
from flask_cors import CORS
- app.config['CORS_ALLOW_HEADERS'] = "Content-Type"
- app.config['CORS_RESOURCES'] = {
- r"/*": {"origins": self.config['cors'].get(str)}
+
+ app.config["CORS_ALLOW_HEADERS"] = "Content-Type"
+ app.config["CORS_RESOURCES"] = {
+ r"/*": {"origins": self.config["cors"].get(str)}
}
CORS(
app,
supports_credentials=self.config[
- 'cors_supports_credentials'
- ].get(bool)
+ "cors_supports_credentials"
+ ].get(bool),
)
# Allow serving behind a reverse proxy
- if self.config['reverse_proxy']:
+ if self.config["reverse_proxy"]:
app.wsgi_app = ReverseProxied(app.wsgi_app)
# Start the web application.
- app.run(host=self.config['host'].as_str(),
- port=self.config['port'].get(int),
- debug=opts.debug, threaded=True)
+ app.run(
+ host=self.config["host"].as_str(),
+ port=self.config["port"].get(int),
+ debug=opts.debug,
+ threaded=True,
+ )
+
cmd.func = func
return [cmd]
class ReverseProxied:
- '''Wrap the application in this middleware and configure the
+ """Wrap the application in this middleware and configure the
front-end server to add these headers, to let you quietly bind
this to a URL other than / and to an HTTP scheme that is
different than what is used locally.
@@ -514,19 +540,20 @@ class ReverseProxied:
From: http://flask.pocoo.org/snippets/35/
:param app: the WSGI application
- '''
+ """
+
def __init__(self, app):
self.app = app
def __call__(self, environ, start_response):
- script_name = environ.get('HTTP_X_SCRIPT_NAME', '')
+ script_name = environ.get("HTTP_X_SCRIPT_NAME", "")
if script_name:
- environ['SCRIPT_NAME'] = script_name
- path_info = environ['PATH_INFO']
+ environ["SCRIPT_NAME"] = script_name
+ path_info = environ["PATH_INFO"]
if path_info.startswith(script_name):
- environ['PATH_INFO'] = path_info[len(script_name):]
+ environ["PATH_INFO"] = path_info[len(script_name) :]
- scheme = environ.get('HTTP_X_SCHEME', '')
+ scheme = environ.get("HTTP_X_SCHEME", "")
if scheme:
- environ['wsgi.url_scheme'] = scheme
+ environ["wsgi.url_scheme"] = scheme
return self.app(environ, start_response)
diff --git a/beetsplug/zero.py b/beetsplug/zero.py
index f05b1b5a5f..14c157ce8a 100644
--- a/beetsplug/zero.py
+++ b/beetsplug/zero.py
@@ -17,29 +17,33 @@
import re
-from beets.plugins import BeetsPlugin
+import confuse
from mediafile import MediaFile
+
from beets.importer import action
+from beets.plugins import BeetsPlugin
from beets.ui import Subcommand, decargs, input_yn
-import confuse
-__author__ = 'baobab@heresiarch.info'
+__author__ = "baobab@heresiarch.info"
class ZeroPlugin(BeetsPlugin):
def __init__(self):
super().__init__()
- self.register_listener('write', self.write_event)
- self.register_listener('import_task_choice',
- self.import_task_choice_event)
+ self.register_listener("write", self.write_event)
+ self.register_listener(
+ "import_task_choice", self.import_task_choice_event
+ )
- self.config.add({
- 'auto': True,
- 'fields': [],
- 'keep_fields': [],
- 'update_database': False,
- })
+ self.config.add(
+ {
+ "auto": True,
+ "fields": [],
+ "keep_fields": [],
+ "update_database": False,
+ }
+ )
self.fields_to_progs = {}
self.warned = False
@@ -51,29 +55,30 @@ def __init__(self):
A field is zeroed if its value matches one of the associated progs. If
progs is empty, then the associated field is always zeroed.
"""
- if self.config['fields'] and self.config['keep_fields']:
- self._log.warning(
- 'cannot blacklist and whitelist at the same time'
- )
+ if self.config["fields"] and self.config["keep_fields"]:
+ self._log.warning("cannot blacklist and whitelist at the same time")
# Blacklist mode.
- elif self.config['fields']:
- for field in self.config['fields'].as_str_seq():
+ elif self.config["fields"]:
+ for field in self.config["fields"].as_str_seq():
self._set_pattern(field)
# Whitelist mode.
- elif self.config['keep_fields']:
+ elif self.config["keep_fields"]:
for field in MediaFile.fields():
- if (field not in self.config['keep_fields'].as_str_seq() and
- # These fields should always be preserved.
- field not in ('id', 'path', 'album_id')):
+ if (
+ field not in self.config["keep_fields"].as_str_seq()
+ and
+ # These fields should always be preserved.
+ field not in ("id", "path", "album_id")
+ ):
self._set_pattern(field)
def commands(self):
- zero_command = Subcommand('zero', help='set fields to null')
+ zero_command = Subcommand("zero", help="set fields to null")
def zero_fields(lib, opts, args):
if not decargs(args) and not input_yn(
- "Remove fields for all items? (Y/n)",
- True):
+ "Remove fields for all items? (Y/n)", True
+ ):
return
for item in lib.items(decargs(args)):
self.process_item(item)
@@ -86,10 +91,11 @@ def _set_pattern(self, field):
Do some sanity checks then compile the regexes.
"""
if field not in MediaFile.fields():
- self._log.error('invalid field: {0}', field)
- elif field in ('id', 'path', 'album_id'):
- self._log.warning('field \'{0}\' ignored, zeroing '
- 'it would be dangerous', field)
+ self._log.error("invalid field: {0}", field)
+ elif field in ("id", "path", "album_id"):
+ self._log.warning(
+ "field '{0}' ignored, zeroing " "it would be dangerous", field
+ )
else:
try:
for pattern in self.config[field].as_str_seq():
@@ -101,12 +107,12 @@ def _set_pattern(self, field):
def import_task_choice_event(self, session, task):
if task.choice_flag == action.ASIS and not self.warned:
- self._log.warning('cannot zero in \"as-is\" mode')
+ self._log.warning('cannot zero in "as-is" mode')
self.warned = True
# TODO request write in as-is mode
def write_event(self, item, path, tags):
- if self.config['auto']:
+ if self.config["auto"]:
self.set_fields(item, tags)
def set_fields(self, item, tags):
@@ -119,7 +125,7 @@ def set_fields(self, item, tags):
fields_set = False
if not self.fields_to_progs:
- self._log.warning('no fields, nothing to do')
+ self._log.warning("no fields, nothing to do")
return False
for field, progs in self.fields_to_progs.items():
@@ -127,14 +133,14 @@ def set_fields(self, item, tags):
value = tags[field]
match = _match_progs(tags[field], progs)
else:
- value = ''
+ value = ""
match = not progs
if match:
fields_set = True
- self._log.debug('{0}: {1} -> None', field, value)
+ self._log.debug("{0}: {1} -> None", field, value)
tags[field] = None
- if self.config['update_database']:
+ if self.config["update_database"]:
item[field] = None
return fields_set
@@ -144,7 +150,7 @@ def process_item(self, item):
if self.set_fields(item, tags):
item.write(tags=tags)
- if self.config['update_database']:
+ if self.config["update_database"]:
item.store(fields=tags)
diff --git a/docs/conf.py b/docs/conf.py
index ddd63e9629..4514bbe2bc 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -1,56 +1,63 @@
-AUTHOR = 'Adrian Sampson'
+AUTHOR = "Adrian Sampson"
# General configuration
-extensions = ['sphinx.ext.autodoc', 'sphinx.ext.extlinks']
+extensions = ["sphinx.ext.autodoc", "sphinx.ext.extlinks"]
-exclude_patterns = ['_build']
-source_suffix = '.rst'
-master_doc = 'index'
+exclude_patterns = ["_build"]
+source_suffix = ".rst"
+master_doc = "index"
-project = 'beets'
-copyright = '2016, Adrian Sampson'
+project = "beets"
+copyright = "2016, Adrian Sampson"
-version = '1.6'
-release = '1.6.1'
+version = "1.6"
+release = "1.6.1"
-pygments_style = 'sphinx'
+pygments_style = "sphinx"
# External links to the bug tracker and other sites.
extlinks = {
- 'bug': ('https://github.com/beetbox/beets/issues/%s', '#%s'),
- 'user': ('https://github.com/%s', '%s'),
- 'pypi': ('https://pypi.org/project/%s/', '%s'),
- 'stdlib': ('https://docs.python.org/3/library/%s.html', '%s'),
+ "bug": ("https://github.com/beetbox/beets/issues/%s", "#%s"),
+ "user": ("https://github.com/%s", "%s"),
+ "pypi": ("https://pypi.org/project/%s/", "%s"),
+ "stdlib": ("https://docs.python.org/3/library/%s.html", "%s"),
}
linkcheck_ignore = [
- r'https://github.com/beetbox/beets/issues/',
- r'https://github.com/[^/]+$', # ignore user pages
- r'.*localhost.*',
- r'https?://127\.0\.0\.1',
- r'https://www.musixmatch.com/', # blocks requests
- r'https://genius.com/', # blocks requests
+ r"https://github.com/beetbox/beets/issues/",
+ r"https://github.com/[^/]+$", # ignore user pages
+ r".*localhost.*",
+ r"https?://127\.0\.0\.1",
+ r"https://www.musixmatch.com/", # blocks requests
+ r"https://genius.com/", # blocks requests
]
# Options for HTML output
-htmlhelp_basename = 'beetsdoc'
+htmlhelp_basename = "beetsdoc"
# Options for LaTeX output
latex_documents = [
- ('index', 'beets.tex', 'beets Documentation',
- AUTHOR, 'manual'),
+ ("index", "beets.tex", "beets Documentation", AUTHOR, "manual"),
]
# Options for manual page output
man_pages = [
- ('reference/cli', 'beet', 'music tagger and library organizer',
- [AUTHOR], 1),
- ('reference/config', 'beetsconfig', 'beets configuration file',
- [AUTHOR], 5),
+ (
+ "reference/cli",
+ "beet",
+ "music tagger and library organizer",
+ [AUTHOR],
+ 1,
+ ),
+ (
+ "reference/config",
+ "beetsconfig",
+ "beets configuration file",
+ [AUTHOR],
+ 5,
+ ),
]
# Options for Alabaster theme
-html_theme_options = {
- "fixed_sidebar": True
-}
+html_theme_options = {"fixed_sidebar": True}
diff --git a/extra/release.py b/extra/release.py
index fc1c967058..c3142268fb 100755
--- a/extra/release.py
+++ b/extra/release.py
@@ -2,21 +2,21 @@
"""A utility script for automating the beets release process.
"""
-import click
+import datetime
import os
import re
import subprocess
from contextlib import contextmanager
-import datetime
+
+import click
BASE = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
-CHANGELOG = os.path.join(BASE, 'docs', 'changelog.rst')
+CHANGELOG = os.path.join(BASE, "docs", "changelog.rst")
@contextmanager
def chdir(d):
- """A context manager that temporary changes the working directory.
- """
+ """A context manager that temporary changes the working directory."""
olddir = os.getcwd()
os.chdir(d)
yield
@@ -31,16 +31,16 @@ def release():
# Locations (filenames and patterns) of the version number.
VERSION_LOCS = [
(
- os.path.join(BASE, 'beets', '__init__.py'),
+ os.path.join(BASE, "beets", "__init__.py"),
[
(
r'__version__\s*=\s*[\'"]([0-9\.]+)[\'"]',
"__version__ = '{version}'",
)
- ]
+ ],
),
(
- os.path.join(BASE, 'docs', 'conf.py'),
+ os.path.join(BASE, "docs", "conf.py"),
[
(
r'version\s*=\s*[\'"]([0-9\.]+)[\'"]',
@@ -50,31 +50,31 @@ def release():
r'release\s*=\s*[\'"]([0-9\.]+)[\'"]',
"release = '{version}'",
),
- ]
+ ],
),
(
- os.path.join(BASE, 'setup.py'),
+ os.path.join(BASE, "setup.py"),
[
(
r'\s*version\s*=\s*[\'"]([0-9\.]+)[\'"]',
" version='{version}',",
)
- ]
+ ],
),
]
-GITHUB_USER = 'beetbox'
-GITHUB_REPO = 'beets'
+GITHUB_USER = "beetbox"
+GITHUB_REPO = "beets"
def bump_version(version):
"""Update the version number in setup.py, docs config, changelog,
and root module.
"""
- version_parts = [int(p) for p in version.split('.')]
+ version_parts = [int(p) for p in version.split(".")]
assert len(version_parts) == 3, "invalid version number"
- minor = '{}.{}'.format(*version_parts)
- major = '{}'.format(*version_parts)
+ minor = "{}.{}".format(*version_parts)
+ major = "{}".format(*version_parts)
# Replace the version each place where it lives.
for filename, locations in VERSION_LOCS:
@@ -88,18 +88,20 @@ def bump_version(version):
if match:
# Check that this version is actually newer.
old_version = match.group(1)
- old_parts = [int(p) for p in old_version.split('.')]
- assert version_parts > old_parts, \
- "version must be newer than {}".format(
- old_version
- )
+ old_parts = [int(p) for p in old_version.split(".")]
+ assert (
+ version_parts > old_parts
+ ), "version must be newer than {}".format(old_version)
# Insert the new version.
- out_lines.append(template.format(
- version=version,
- major=major,
- minor=minor,
- ) + '\n')
+ out_lines.append(
+ template.format(
+ version=version,
+ major=major,
+ minor=minor,
+ )
+ + "\n"
+ )
found = True
break
@@ -112,41 +114,39 @@ def bump_version(version):
            print(f"No pattern found in {filename}")
# Write the file back.
- with open(filename, 'w') as f:
- f.write(''.join(out_lines))
+ with open(filename, "w") as f:
+ f.write("".join(out_lines))
# Generate bits to insert into changelog.
- header_line = f'{version} (in development)'
- header = '\n\n' + header_line + '\n' + '-' * len(header_line) + '\n\n'
- header += 'Changelog goes here!\n'
+ header_line = f"{version} (in development)"
+ header = "\n\n" + header_line + "\n" + "-" * len(header_line) + "\n\n"
+ header += "Changelog goes here!\n"
# Insert into the right place.
with open(CHANGELOG) as f:
contents = f.read()
- location = contents.find('\n\n') # First blank line.
+ location = contents.find("\n\n") # First blank line.
contents = contents[:location] + header + contents[location:]
# Write back.
- with open(CHANGELOG, 'w') as f:
+ with open(CHANGELOG, "w") as f:
f.write(contents)
@release.command()
-@click.argument('version')
+@click.argument("version")
def bump(version):
- """Bump the version number.
- """
+ """Bump the version number."""
bump_version(version)
def get_latest_changelog():
- """Extract the first section of the changelog.
- """
+ """Extract the first section of the changelog."""
started = False
lines = []
with open(CHANGELOG) as f:
for line in f:
- if re.match(r'^--+$', line.strip()):
+ if re.match(r"^--+$", line.strip()):
# Section boundary. Start or end.
if started:
# Remove last line, which is the header of the next
@@ -158,69 +158,67 @@ def get_latest_changelog():
elif started:
lines.append(line)
- return ''.join(lines).strip()
+ return "".join(lines).strip()
def rst2md(text):
- """Use Pandoc to convert text from ReST to Markdown.
- """
+ """Use Pandoc to convert text from ReST to Markdown."""
pandoc = subprocess.Popen(
- ['pandoc', '--from=rst', '--to=markdown', '--wrap=none'],
- stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE
+ ["pandoc", "--from=rst", "--to=markdown", "--wrap=none"],
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
)
- stdout, _ = pandoc.communicate(text.encode('utf-8'))
- md = stdout.decode('utf-8').strip()
+ stdout, _ = pandoc.communicate(text.encode("utf-8"))
+ md = stdout.decode("utf-8").strip()
# Fix up odd spacing in lists.
- return re.sub(r'^- ', '- ', md, flags=re.M)
+ return re.sub(r"^- ", "- ", md, flags=re.M)
def changelog_as_markdown():
- """Get the latest changelog entry as hacked up Markdown.
- """
+ """Get the latest changelog entry as hacked up Markdown."""
rst = get_latest_changelog()
# Replace plugin links with plugin names.
- rst = re.sub(r':doc:`/plugins/(\w+)`', r'``\1``', rst)
+ rst = re.sub(r":doc:`/plugins/(\w+)`", r"``\1``", rst)
# References with text.
- rst = re.sub(r':ref:`([^<]+)(<[^>]+>)`', r'\1', rst)
+ rst = re.sub(r":ref:`([^<]+)(<[^>]+>)`", r"\1", rst)
# Other backslashes with verbatim ranges.
- rst = re.sub(r'(\s)`([^`]+)`([^_])', r'\1``\2``\3', rst)
+ rst = re.sub(r"(\s)`([^`]+)`([^_])", r"\1``\2``\3", rst)
# Command links with command names.
- rst = re.sub(r':ref:`(\w+)-cmd`', r'``\1``', rst)
+ rst = re.sub(r":ref:`(\w+)-cmd`", r"``\1``", rst)
# Bug numbers.
- rst = re.sub(r':bug:`(\d+)`', r'#\1', rst)
+ rst = re.sub(r":bug:`(\d+)`", r"#\1", rst)
# Users.
- rst = re.sub(r':user:`(\w+)`', r'@\1', rst)
+ rst = re.sub(r":user:`(\w+)`", r"@\1", rst)
# Convert with Pandoc.
md = rst2md(rst)
# Restore escaped issue numbers.
- md = re.sub(r'\\#(\d+)\b', r'#\1', md)
+ md = re.sub(r"\\#(\d+)\b", r"#\1", md)
return md
@release.command()
def changelog():
- """Get the most recent version's changelog as Markdown.
- """
+ """Get the most recent version's changelog as Markdown."""
print(changelog_as_markdown())
def get_version(index=0):
- """Read the current version from the changelog.
- """
+ """Read the current version from the changelog."""
with open(CHANGELOG) as f:
cur_index = 0
for line in f:
- match = re.search(r'^\d+\.\d+\.\d+', line)
+ match = re.search(r"^\d+\.\d+\.\d+", line)
if match:
if cur_index == index:
return match.group(0)
@@ -230,18 +228,16 @@ def get_version(index=0):
@release.command()
def version():
- """Display the current version.
- """
+ """Display the current version."""
print(get_version())
@release.command()
def datestamp():
- """Enter today's date as the release date in the changelog.
- """
+ """Enter today's date as the release date in the changelog."""
dt = datetime.datetime.now()
- stamp = '({} {}, {})'.format(dt.strftime('%B'), dt.day, dt.year)
- marker = '(in development)'
+ stamp = "({} {}, {})".format(dt.strftime("%B"), dt.day, dt.year)
+ marker = "(in development)"
lines = []
underline_length = None
@@ -254,12 +250,12 @@ def datestamp():
underline_length = len(line.strip())
elif underline_length:
# This is the line after the header. Rewrite the dashes.
- lines.append('-' * underline_length + '\n')
+ lines.append("-" * underline_length + "\n")
underline_length = None
else:
lines.append(line)
- with open(CHANGELOG, 'w') as f:
+ with open(CHANGELOG, "w") as f:
for line in lines:
f.write(line)
@@ -276,22 +272,22 @@ def prep():
cur_version = get_version()
# Tag.
- subprocess.check_call(['git', 'tag', f'v{cur_version}'])
+ subprocess.check_call(["git", "tag", f"v{cur_version}"])
# Build.
with chdir(BASE):
- subprocess.check_call(['python', 'setup.py', 'sdist'])
+ subprocess.check_call(["python", "setup.py", "sdist"])
# Generate Markdown changelog.
cl = changelog_as_markdown()
- with open(os.path.join(BASE, 'changelog.md'), 'w') as f:
+ with open(os.path.join(BASE, "changelog.md"), "w") as f:
f.write(cl)
# Version number bump.
# FIXME It should be possible to specify this as an argument.
- version_parts = [int(n) for n in cur_version.split('.')]
+ version_parts = [int(n) for n in cur_version.split(".")]
version_parts[-1] += 1
- next_version = '.'.join(map(str, version_parts))
+ next_version = ".".join(map(str, version_parts))
bump_version(next_version)
@@ -306,12 +302,12 @@ def publish():
# Push to GitHub.
with chdir(BASE):
- subprocess.check_call(['git', 'push'])
- subprocess.check_call(['git', 'push', '--tags'])
+ subprocess.check_call(["git", "push"])
+ subprocess.check_call(["git", "push", "--tags"])
# Upload to PyPI.
- path = os.path.join(BASE, 'dist', f'beets-{version}.tar.gz')
- subprocess.check_call(['twine', 'upload', path])
+ path = os.path.join(BASE, "dist", f"beets-{version}.tar.gz")
+ subprocess.check_call(["twine", "upload", path])
@release.command()
@@ -323,31 +319,49 @@ def ghrelease():
tarball from the `dist` directory.
"""
version = get_version(1)
- tag = 'v' + version
+ tag = "v" + version
# Load the changelog.
- with open(os.path.join(BASE, 'changelog.md')) as f:
+ with open(os.path.join(BASE, "changelog.md")) as f:
cl_md = f.read()
# Create the release.
- subprocess.check_call([
- 'github-release', 'release',
- '-u', GITHUB_USER, '-r', GITHUB_REPO,
- '--tag', tag,
- '--name', f'{GITHUB_REPO} {version}',
- '--description', cl_md,
- ])
+ subprocess.check_call(
+ [
+ "github-release",
+ "release",
+ "-u",
+ GITHUB_USER,
+ "-r",
+ GITHUB_REPO,
+ "--tag",
+ tag,
+ "--name",
+ f"{GITHUB_REPO} {version}",
+ "--description",
+ cl_md,
+ ]
+ )
# Attach the release tarball.
- tarball = os.path.join(BASE, 'dist', f'beets-{version}.tar.gz')
- subprocess.check_call([
- 'github-release', 'upload',
- '-u', GITHUB_USER, '-r', GITHUB_REPO,
- '--tag', tag,
- '--name', os.path.basename(tarball),
- '--file', tarball,
- ])
+ tarball = os.path.join(BASE, "dist", f"beets-{version}.tar.gz")
+ subprocess.check_call(
+ [
+ "github-release",
+ "upload",
+ "-u",
+ GITHUB_USER,
+ "-r",
+ GITHUB_REPO,
+ "--tag",
+ tag,
+ "--name",
+ os.path.basename(tarball),
+ "--file",
+ tarball,
+ ]
+ )
-if __name__ == '__main__':
+if __name__ == "__main__":
release()
diff --git a/setup.py b/setup.py
index 26eb8048a1..c96f1fe24f 100755
--- a/setup.py
+++ b/setup.py
@@ -16,9 +16,10 @@
import os
-import sys
-import subprocess
import shutil
+import subprocess
+import sys
+
from setuptools import setup
@@ -29,11 +30,11 @@ def _read(fn):
def build_manpages():
# Go into the docs directory and build the manpage.
- docdir = os.path.join(os.path.dirname(__file__), 'docs')
+ docdir = os.path.join(os.path.dirname(__file__), "docs")
curdir = os.getcwd()
os.chdir(docdir)
try:
- subprocess.check_call(['make', 'man'])
+ subprocess.check_call(["make", "man"])
except OSError:
print("Could not build manpages (make man failed)!", file=sys.stderr)
return
@@ -41,122 +42,121 @@ def build_manpages():
os.chdir(curdir)
# Copy resulting manpages.
- mandir = os.path.join(os.path.dirname(__file__), 'man')
+ mandir = os.path.join(os.path.dirname(__file__), "man")
if os.path.exists(mandir):
shutil.rmtree(mandir)
- shutil.copytree(os.path.join(docdir, '_build', 'man'), mandir)
+ shutil.copytree(os.path.join(docdir, "_build", "man"), mandir)
# Build manpages if we're making a source distribution tarball.
-if 'sdist' in sys.argv:
+if "sdist" in sys.argv:
build_manpages()
setup(
- name='beets',
- version='1.6.1',
- description='music tagger and library organizer',
- author='Adrian Sampson',
- author_email='adrian@radbox.org',
- url='https://beets.io/',
- license='MIT',
- platforms='ALL',
- long_description=_read('README.rst'),
- test_suite='test.testall.suite',
+ name="beets",
+ version="1.6.1",
+ description="music tagger and library organizer",
+ author="Adrian Sampson",
+ author_email="adrian@radbox.org",
+ url="https://beets.io/",
+ license="MIT",
+ platforms="ALL",
+ long_description=_read("README.rst"),
+ test_suite="test.testall.suite",
zip_safe=False,
include_package_data=True, # Install plugin resources.
-
packages=[
- 'beets',
- 'beets.ui',
- 'beets.autotag',
- 'beets.util',
- 'beets.dbcore',
- 'beetsplug',
- 'beetsplug.bpd',
- 'beetsplug.web',
- 'beetsplug.lastgenre',
- 'beetsplug.metasync',
+ "beets",
+ "beets.ui",
+ "beets.autotag",
+ "beets.util",
+ "beets.dbcore",
+ "beetsplug",
+ "beetsplug.bpd",
+ "beetsplug.web",
+ "beetsplug.lastgenre",
+ "beetsplug.metasync",
],
entry_points={
- 'console_scripts': [
- 'beet = beets.ui:main',
+ "console_scripts": [
+ "beet = beets.ui:main",
],
},
-
install_requires=[
- 'unidecode>=1.3.6',
- 'musicbrainzngs>=0.4',
- 'pyyaml',
- 'mediafile>=0.12.0',
- 'confuse>=1.5.0',
- 'munkres>=1.0.0',
- 'jellyfish',
- 'typing_extensions',
- ] + (
+ "unidecode>=1.3.6",
+ "musicbrainzngs>=0.4",
+ "pyyaml",
+ "mediafile>=0.12.0",
+ "confuse>=1.5.0",
+ "munkres>=1.0.0",
+ "jellyfish",
+ "typing_extensions",
+ ]
+ + (
# Support for ANSI console colors on Windows.
- ['colorama'] if (sys.platform == 'win32') else []
+ ["colorama"]
+ if (sys.platform == "win32")
+ else []
),
-
extras_require={
- 'test': [
- 'beautifulsoup4',
- 'coverage',
- 'flask',
- 'mock',
- 'pylast',
- 'pytest',
- 'python-mpd2',
- 'pyxdg',
- 'responses>=0.3.0',
- 'requests_oauthlib',
- 'reflink',
- 'rarfile',
- 'python3-discogs-client>=2.3.15',
- 'py7zr',
+ "test": [
+ "beautifulsoup4",
+ "coverage",
+ "flask",
+ "mock",
+ "pylast",
+ "pytest",
+ "python-mpd2",
+ "pyxdg",
+ "responses>=0.3.0",
+ "requests_oauthlib",
+ "reflink",
+ "rarfile",
+ "python3-discogs-client>=2.3.15",
+ "py7zr",
],
- 'lint': [
- 'flake8',
- 'flake8-docstrings',
- 'pep8-naming',
+ "lint": [
+ "flake8",
+ "flake8-docstrings",
+ "pep8-naming",
],
- 'mypy': [
- 'mypy',
- 'types-Pillow',
- 'types-urllib3',
- 'types-beautifulsoup4',
- 'types-PyYAML',
- 'types-requests',
- 'types-Flask-Cors',
+ "mypy": [
+ "mypy",
+ "types-Pillow",
+ "types-urllib3",
+ "types-beautifulsoup4",
+ "types-PyYAML",
+ "types-requests",
+ "types-Flask-Cors",
],
- 'docs': [
- 'sphinx',
- 'sphinx_rtd_theme',
+ "docs": [
+ "sphinx",
+ "sphinx_rtd_theme",
],
-
# Plugin (optional) dependencies:
- 'absubmit': ['requests'],
- 'fetchart': ['requests', 'Pillow', 'beautifulsoup4'],
- 'embedart': ['Pillow'],
- 'embyupdate': ['requests'],
- 'chroma': ['pyacoustid'],
- 'discogs': ['python3-discogs-client>=2.3.15'],
- 'beatport': ['requests-oauthlib>=0.6.1'],
- 'kodiupdate': ['requests'],
- 'lastgenre': ['pylast'],
- 'lastimport': ['pylast'],
- 'lyrics': ['requests', 'beautifulsoup4', 'langdetect'],
- 'mpdstats': ['python-mpd2>=0.4.2'],
- 'plexupdate': ['requests'],
- 'web': ['flask', 'flask-cors'],
- 'import': ['rarfile', 'py7zr'],
- 'thumbnails': ['pyxdg', 'Pillow'],
- 'metasync': ['dbus-python'],
- 'sonosupdate': ['soco'],
- 'scrub': ['mutagen>=1.33'],
- 'bpd': ['PyGObject'],
- 'replaygain': ['PyGObject'],
- 'reflink': ['reflink'],
+ "absubmit": ["requests"],
+ "fetchart": ["requests", "Pillow", "beautifulsoup4"],
+ "embedart": ["Pillow"],
+ "embyupdate": ["requests"],
+ "chroma": ["pyacoustid"],
+ "discogs": ["python3-discogs-client>=2.3.15"],
+ "beatport": ["requests-oauthlib>=0.6.1"],
+ "kodiupdate": ["requests"],
+ "lastgenre": ["pylast"],
+ "lastimport": ["pylast"],
+ "lyrics": ["requests", "beautifulsoup4", "langdetect"],
+ "mpdstats": ["python-mpd2>=0.4.2"],
+ "plexupdate": ["requests"],
+ "web": ["flask", "flask-cors"],
+ "import": ["rarfile", "py7zr"],
+ "thumbnails": ["pyxdg", "Pillow"],
+ "metasync": ["dbus-python"],
+ "sonosupdate": ["soco"],
+ "scrub": ["mutagen>=1.33"],
+ "bpd": ["PyGObject"],
+ "replaygain": ["PyGObject"],
+ "reflink": ["reflink"],
},
# Non-Python/non-PyPI plugin dependencies:
# chroma: chromaprint or fpcalc
@@ -171,19 +171,18 @@ def build_manpages():
# or Python Audio Tools
# or ffmpeg
# ipfs: go-ipfs
-
classifiers=[
- 'Topic :: Multimedia :: Sound/Audio',
- 'Topic :: Multimedia :: Sound/Audio :: Players :: MP3',
- 'License :: OSI Approved :: MIT License',
- 'Environment :: Console',
- 'Environment :: Web Environment',
- 'Programming Language :: Python',
- 'Programming Language :: Python :: 3',
- 'Programming Language :: Python :: 3.7',
- 'Programming Language :: Python :: 3.8',
- 'Programming Language :: Python :: 3.9',
- 'Programming Language :: Python :: 3.10',
- 'Programming Language :: Python :: Implementation :: CPython',
+ "Topic :: Multimedia :: Sound/Audio",
+ "Topic :: Multimedia :: Sound/Audio :: Players :: MP3",
+ "License :: OSI Approved :: MIT License",
+ "Environment :: Console",
+ "Environment :: Web Environment",
+ "Programming Language :: Python",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.7",
+ "Programming Language :: Python :: 3.8",
+ "Programming Language :: Python :: 3.9",
+ "Programming Language :: Python :: 3.10",
+ "Programming Language :: Python :: Implementation :: CPython",
],
)
diff --git a/test/_common.py b/test/_common.py
index 065e36dbeb..01839e96f8 100644
--- a/test/_common.py
+++ b/test/_common.py
@@ -14,36 +14,36 @@
"""Some common functionality for beets' test cases."""
-import time
-import sys
import os
-import tempfile
import shutil
+import sys
+import tempfile
+import time
import unittest
from contextlib import contextmanager
-
# Mangle the search path to include the beets sources.
-sys.path.insert(0, '..')
+sys.path.insert(0, "..")
+import beets # noqa: E402
import beets.library # noqa: E402
+
+# Make sure the development versions of the plugins are used
+import beetsplug # noqa: E402
+from beets import util # noqa: E402
from beets import importer, logging # noqa: E402
from beets.ui import commands # noqa: E402
-from beets import util # noqa: E402
from beets.util import bytestring_path, syspath # noqa: E402
-import beets # noqa: E402
-# Make sure the development versions of the plugins are used
-import beetsplug # noqa: E402
-beetsplug.__path__ = [os.path.abspath(
- os.path.join(__file__, '..', '..', 'beetsplug')
-)]
+beetsplug.__path__ = [
+ os.path.abspath(os.path.join(__file__, "..", "..", "beetsplug"))
+]
# Test resources path.
-RSRC = util.bytestring_path(os.path.join(os.path.dirname(__file__), 'rsrc'))
-PLUGINPATH = os.path.join(os.path.dirname(__file__), 'rsrc', 'beetsplug')
+RSRC = util.bytestring_path(os.path.join(os.path.dirname(__file__), "rsrc"))
+PLUGINPATH = os.path.join(os.path.dirname(__file__), "rsrc", "beetsplug")
# Propagate to root logger so the test runner can capture it
-log = logging.getLogger('beets')
+log = logging.getLogger("beets")
log.propagate = True
log.setLevel(logging.DEBUG)
@@ -51,11 +51,12 @@
_item_ident = 0
# OS feature test.
-HAVE_SYMLINK = sys.platform != 'win32'
-HAVE_HARDLINK = sys.platform != 'win32'
+HAVE_SYMLINK = sys.platform != "win32"
+HAVE_HARDLINK = sys.platform != "win32"
try:
import reflink
+
HAVE_REFLINK = reflink.supported_at(tempfile.gettempdir())
except ImportError:
HAVE_REFLINK = False
@@ -65,18 +66,18 @@ def item(lib=None):
global _item_ident
_item_ident += 1
i = beets.library.Item(
- title='the title',
- artist='the artist',
- albumartist='the album artist',
- album='the album',
- genre='the genre',
- lyricist='the lyricist',
- composer='the composer',
- arranger='the arranger',
- grouping='the grouping',
- work='the work title',
- mb_workid='the work musicbrainz id',
- work_disambig='the work disambiguation',
+ title="the title",
+ artist="the artist",
+ albumartist="the album artist",
+ album="the album",
+ genre="the genre",
+ lyricist="the lyricist",
+ composer="the composer",
+ arranger="the arranger",
+ grouping="the grouping",
+ work="the work title",
+ mb_workid="the work musicbrainz id",
+ work_disambig="the work disambiguation",
year=1,
month=2,
day=3,
@@ -84,19 +85,19 @@ def item(lib=None):
tracktotal=5,
disc=6,
disctotal=7,
- lyrics='the lyrics',
- comments='the comments',
+ lyrics="the lyrics",
+ comments="the comments",
bpm=8,
comp=True,
- path=f'somepath{_item_ident}',
+ path=f"somepath{_item_ident}",
length=60.0,
bitrate=128000,
- format='FLAC',
- mb_trackid='someID-1',
- mb_albumid='someID-2',
- mb_artistid='someID-3',
- mb_albumartistid='someID-4',
- mb_releasetrackid='someID-5',
+ format="FLAC",
+ mb_trackid="someID-1",
+ mb_albumid="someID-2",
+ mb_artistid="someID-3",
+ mb_albumartistid="someID-4",
+ mb_releasetrackid="someID-5",
album_id=None,
mtime=12345,
)
@@ -104,6 +105,7 @@ def item(lib=None):
lib.add(i)
return i
+
_album_ident = 0
@@ -112,19 +114,19 @@ def album(lib=None):
_item_ident += 1
i = beets.library.Album(
artpath=None,
- albumartist='some album artist',
- albumartist_sort='some sort album artist',
- albumartist_credit='some album artist credit',
- album='the album',
- genre='the genre',
+ albumartist="some album artist",
+ albumartist_sort="some sort album artist",
+ albumartist_credit="some album artist credit",
+ album="the album",
+ genre="the genre",
year=2014,
month=2,
day=5,
tracktotal=0,
disctotal=1,
comp=False,
- mb_albumid='someID-1',
- mb_albumartistid='someID-1'
+ mb_albumid="someID-1",
+ mb_albumartistid="someID-1",
)
if lib:
lib.add(i)
@@ -141,29 +143,36 @@ class Assertions:
"""A mixin with additional unit test assertions."""
def assertExists(self, path): # noqa
- self.assertTrue(os.path.exists(syspath(path)),
- f'file does not exist: {path!r}')
+ self.assertTrue(
+ os.path.exists(syspath(path)), f"file does not exist: {path!r}"
+ )
def assertNotExists(self, path): # noqa
- self.assertFalse(os.path.exists(syspath(path)),
- f'file exists: {path!r}')
+ self.assertFalse(
+ os.path.exists(syspath(path)), f"file exists: {path!r}"
+ )
def assertIsFile(self, path): # noqa
self.assertExists(path)
- self.assertTrue(os.path.isfile(syspath(path)),
- u'path exists, but is not a regular file: {!r}'
- .format(path))
+ self.assertTrue(
+ os.path.isfile(syspath(path)),
+ "path exists, but is not a regular file: {!r}".format(path),
+ )
def assertIsDir(self, path): # noqa
self.assertExists(path)
- self.assertTrue(os.path.isdir(syspath(path)),
- u'path exists, but is not a directory: {!r}'
- .format(path))
+ self.assertTrue(
+ os.path.isdir(syspath(path)),
+ "path exists, but is not a directory: {!r}".format(path),
+ )
def assert_equal_path(self, a, b):
"""Check that two paths are equal."""
- self.assertEqual(util.normpath(a), util.normpath(b),
- f'paths are not equal: {a!r} and {b!r}')
+ self.assertEqual(
+ util.normpath(a),
+ util.normpath(b),
+ f"paths are not equal: {a!r} and {b!r}",
+ )
# A test harness for all beets tests.
@@ -175,6 +184,7 @@ class TestCase(unittest.TestCase, Assertions):
completes. Also provides some additional assertion methods, a
temporary directory, and a DummyIO.
"""
+
def setUp(self):
# A "clean" source list including only the defaults.
beets.config.sources = []
@@ -184,16 +194,19 @@ def setUp(self):
# temporary directory.
self.temp_dir = util.bytestring_path(tempfile.mkdtemp())
- beets.config['statefile'] = \
- util.py3_path(os.path.join(self.temp_dir, b'state.pickle'))
- beets.config['library'] = \
- util.py3_path(os.path.join(self.temp_dir, b'library.db'))
- beets.config['directory'] = \
- util.py3_path(os.path.join(self.temp_dir, b'libdir'))
+ beets.config["statefile"] = util.py3_path(
+ os.path.join(self.temp_dir, b"state.pickle")
+ )
+ beets.config["library"] = util.py3_path(
+ os.path.join(self.temp_dir, b"library.db")
+ )
+ beets.config["directory"] = util.py3_path(
+ os.path.join(self.temp_dir, b"libdir")
+ )
# Set $HOME, which is used by Confuse to create directories.
- self._old_home = os.environ.get('HOME')
- os.environ['HOME'] = util.py3_path(self.temp_dir)
+ self._old_home = os.environ.get("HOME")
+ os.environ["HOME"] = util.py3_path(self.temp_dir)
# Initialize, but don't install, a DummyIO.
self.io = DummyIO()
@@ -202,9 +215,9 @@ def tearDown(self):
if os.path.isdir(syspath(self.temp_dir)):
shutil.rmtree(syspath(self.temp_dir))
if self._old_home is None:
- del os.environ['HOME']
+ del os.environ["HOME"]
else:
- os.environ['HOME'] = self._old_home
+ os.environ["HOME"] = self._old_home
self.io.restore()
beets.config.clear()
@@ -215,9 +228,10 @@ class LibTestCase(TestCase):
"""A test case that includes an in-memory library object (`lib`) and
an item added to the library (`i`).
"""
+
def setUp(self):
super().setUp()
- self.lib = beets.library.Library(':memory:')
+ self.lib = beets.library.Library(":memory:")
self.i = item(self.lib)
def tearDown(self):
@@ -227,10 +241,12 @@ def tearDown(self):
# Mock timing.
+
class Timecop:
"""Mocks the timing system (namely time() and sleep()) for testing.
Inspired by the Ruby timecop library.
"""
+
def __init__(self):
self.now = time.time()
@@ -242,19 +258,20 @@ def sleep(self, amount):
def install(self):
self.orig = {
- 'time': time.time,
- 'sleep': time.sleep,
+ "time": time.time,
+ "sleep": time.sleep,
}
time.time = self.time
time.sleep = self.sleep
def restore(self):
- time.time = self.orig['time']
- time.sleep = self.orig['sleep']
+ time.time = self.orig["time"]
+ time.sleep = self.orig["sleep"]
# Mock I/O.
+
class InputException(Exception):
def __init__(self, output=None):
self.output = output
@@ -267,7 +284,7 @@ def __str__(self):
class DummyOut:
- encoding = 'utf-8'
+ encoding = "utf-8"
def __init__(self):
self.buf = []
@@ -276,7 +293,7 @@ def write(self, s):
self.buf.append(s)
def get(self):
- return ''.join(self.buf)
+ return "".join(self.buf)
def flush(self):
self.clear()
@@ -286,7 +303,7 @@ def clear(self):
class DummyIn:
- encoding = 'utf-8'
+ encoding = "utf-8"
def __init__(self, out=None):
self.buf = []
@@ -294,7 +311,7 @@ def __init__(self, out=None):
self.out = out
def add(self, s):
- self.buf.append(s + '\n')
+ self.buf.append(s + "\n")
def close(self):
pass
@@ -311,6 +328,7 @@ def readline(self):
class DummyIO:
"""Mocks input and output streams for testing UI code."""
+
def __init__(self):
self.stdout = DummyOut()
self.stdin = DummyIn(self.stdout)
@@ -337,8 +355,9 @@ def restore(self):
# Utility.
+
def touch(path):
- open(syspath(path), 'a').close()
+ open(syspath(path), "a").close()
class Bag:
@@ -346,6 +365,7 @@ class Bag:
arguments. Any field not found in the dictionary appears to be None.
Used for mocking Album objects and the like.
"""
+
def __init__(self, **fields):
self.fields = fields
@@ -356,9 +376,9 @@ def __getattr__(self, key):
# Convenience methods for setting up a temporary sandbox directory for tests
# that need to interact with the filesystem.
+
class TempDirMixin:
- """Text mixin for creating and deleting a temporary directory.
- """
+    """Test mixin for creating and deleting a temporary directory."""
def create_temp_dir(self):
"""Create a temporary directory and assign it into `self.temp_dir`.
@@ -367,17 +387,18 @@ def create_temp_dir(self):
self.temp_dir = bytestring_path(tempfile.mkdtemp())
def remove_temp_dir(self):
- """Delete the temporary directory created by `create_temp_dir`.
- """
+ """Delete the temporary directory created by `create_temp_dir`."""
if os.path.isdir(syspath(self.temp_dir)):
shutil.rmtree(syspath(self.temp_dir))
# Platform mocking.
+
@contextmanager
def platform_windows():
import ntpath
+
old_path = os.path
try:
os.path = ntpath
@@ -389,6 +410,7 @@ def platform_windows():
@contextmanager
def platform_posix():
import posixpath
+
old_path = os.path
try:
os.path = posixpath
@@ -400,6 +422,7 @@ def platform_posix():
@contextmanager
def system_mock(name):
import platform
+
old_system = platform.system
platform.system = lambda: name
try:
@@ -411,6 +434,7 @@ def system_mock(name):
def slow_test(unused=None):
def _id(obj):
return obj
- if 'SKIP_SLOW_TESTS' in os.environ:
- return unittest.skip('test is slow')
+
+ if "SKIP_SLOW_TESTS" in os.environ:
+ return unittest.skip("test is slow")
return _id
diff --git a/test/helper.py b/test/helper.py
index b5055cfef6..6e73990c09 100644
--- a/test/helper.py
+++ b/test/helper.py
@@ -30,33 +30,30 @@
"""
-import sys
import os
import os.path
import shutil
import subprocess
-from tempfile import mkdtemp, mkstemp
+import sys
from contextlib import contextmanager
-from io import StringIO
from enum import Enum
+from io import StringIO
+from tempfile import mkdtemp, mkstemp
+
+# TODO Move AutotagMock here
+from test import _common
+
+from mediafile import Image, MediaFile
import beets
-from beets import logging
-from beets import config
import beets.plugins
-from beets.library import Library, Item, Album
-from beets import importer
+from beets import config, importer, logging, util
from beets.autotag.hooks import AlbumInfo, TrackInfo
-from mediafile import MediaFile, Image
-from beets import util
-from beets.util import MoveOperation, syspath, bytestring_path
-
-# TODO Move AutotagMock here
-from test import _common
+from beets.library import Album, Item, Library
+from beets.util import MoveOperation, bytestring_path, syspath
class LogCapture(logging.Handler):
-
def __init__(self):
logging.Handler.__init__(self)
self.messages = []
@@ -66,7 +63,7 @@ def emit(self, record):
@contextmanager
-def capture_log(logger='beets'):
+def capture_log(logger="beets"):
capture = LogCapture()
log = logging.getLogger(logger)
log.addHandler(capture)
@@ -113,7 +110,7 @@ def capture_stdout():
def _convert_args(args):
"""Convert args to bytestrings for Python 2 and convert them to strings
- on Python 3.
+ on Python 3.
"""
for i, elem in enumerate(args):
if isinstance(elem, bytes):
@@ -122,14 +119,14 @@ def _convert_args(args):
return args
-def has_program(cmd, args=['--version']):
- """Returns `True` if `cmd` can be executed.
- """
+def has_program(cmd, args=["--version"]):
+ """Returns `True` if `cmd` can be executed."""
full_cmd = _convert_args([cmd] + args)
try:
- with open(os.devnull, 'wb') as devnull:
- subprocess.check_call(full_cmd, stderr=devnull,
- stdout=devnull, stdin=devnull)
+ with open(os.devnull, "wb") as devnull:
+ subprocess.check_call(
+ full_cmd, stderr=devnull, stdout=devnull, stdin=devnull
+ )
except OSError:
return False
except subprocess.CalledProcessError:
@@ -144,6 +141,7 @@ class TestHelper:
This mixin provides methods to isolate beets' global state provide
fixtures.
"""
+
# TODO automate teardown through hook registration
def setup_beets(self, disk=False):
@@ -169,33 +167,31 @@ def setup_beets(self, disk=False):
Make sure you call ``teardown_beets()`` afterwards.
"""
self.create_temp_dir()
- os.environ['BEETSDIR'] = util.py3_path(self.temp_dir)
+ os.environ["BEETSDIR"] = util.py3_path(self.temp_dir)
self.config = beets.config
self.config.clear()
self.config.read()
- self.config['plugins'] = []
- self.config['verbose'] = 1
- self.config['ui']['color'] = False
- self.config['threaded'] = False
+ self.config["plugins"] = []
+ self.config["verbose"] = 1
+ self.config["ui"]["color"] = False
+ self.config["threaded"] = False
- self.libdir = os.path.join(self.temp_dir, b'libdir')
+ self.libdir = os.path.join(self.temp_dir, b"libdir")
os.mkdir(syspath(self.libdir))
- self.config['directory'] = util.py3_path(self.libdir)
+ self.config["directory"] = util.py3_path(self.libdir)
if disk:
- dbpath = util.bytestring_path(
- self.config['library'].as_filename()
- )
+ dbpath = util.bytestring_path(self.config["library"].as_filename())
else:
- dbpath = ':memory:'
+ dbpath = ":memory:"
self.lib = Library(dbpath, self.libdir)
def teardown_beets(self):
self.lib._close()
- if 'BEETSDIR' in os.environ:
- del os.environ['BEETSDIR']
+ if "BEETSDIR" in os.environ:
+ del os.environ["BEETSDIR"]
self.remove_temp_dir()
self.config.clear()
beets.config.read(user=False, defaults=True)
@@ -207,7 +203,7 @@ def load_plugins(self, *plugins):
sure you call ``unload_plugins()`` afterwards.
"""
# FIXME this should eventually be handled by a plugin manager
- beets.config['plugins'] = plugins
+ beets.config["plugins"] = plugins
beets.plugins.load_plugins(plugins)
beets.plugins.find_plugins()
@@ -224,10 +220,9 @@ def load_plugins(self, *plugins):
Album._queries.update(beets.plugins.named_queries(Album))
def unload_plugins(self):
- """Unload all plugins and remove the from the configuration.
- """
+        """Unload all plugins and remove them from the configuration."""
# FIXME this should eventually be handled by a plugin manager
- beets.config['plugins'] = []
+ beets.config["plugins"] = []
beets.plugins._classes = set()
beets.plugins._instances = {}
Item._types = Item._original_types
@@ -241,13 +236,13 @@ def create_importer(self, item_count=1, album_count=1):
Copies the specified number of files to a subdirectory of
`self.temp_dir` and creates a `ImportSessionFixture` for this path.
"""
- import_dir = os.path.join(self.temp_dir, b'import')
+ import_dir = os.path.join(self.temp_dir, b"import")
if not os.path.isdir(syspath(import_dir)):
os.mkdir(syspath(import_dir))
album_no = 0
while album_count:
- album = util.bytestring_path(f'album {album_no}')
+ album = util.bytestring_path(f"album {album_no}")
album_dir = os.path.join(import_dir, album)
if os.path.exists(syspath(album_dir)):
album_no += 1
@@ -258,9 +253,9 @@ def create_importer(self, item_count=1, album_count=1):
track_no = 0
album_item_count = item_count
while album_item_count:
- title = f'track {track_no}'
- src = os.path.join(_common.RSRC, b'full.mp3')
- title_file = util.bytestring_path(f'{title}.mp3')
+ title = f"track {track_no}"
+ src = os.path.join(_common.RSRC, b"full.mp3")
+ title_file = util.bytestring_path(f"{title}.mp3")
dest = os.path.join(album_dir, title_file)
if os.path.exists(syspath(dest)):
track_no += 1
@@ -268,22 +263,25 @@ def create_importer(self, item_count=1, album_count=1):
album_item_count -= 1
shutil.copy(syspath(src), syspath(dest))
mediafile = MediaFile(dest)
- mediafile.update({
- 'artist': 'artist',
- 'albumartist': 'album artist',
- 'title': title,
- 'album': album,
- 'mb_albumid': None,
- 'mb_trackid': None,
- })
+ mediafile.update(
+ {
+ "artist": "artist",
+ "albumartist": "album artist",
+ "title": title,
+ "album": album,
+ "mb_albumid": None,
+ "mb_trackid": None,
+ }
+ )
mediafile.save()
- config['import']['quiet'] = True
- config['import']['autotag'] = False
- config['import']['resume'] = False
+ config["import"]["quiet"] = True
+ config["import"]["autotag"] = False
+ config["import"]["resume"] = False
- return ImportSessionFixture(self.lib, loghandler=None, query=None,
- paths=[import_dir])
+ return ImportSessionFixture(
+ self.lib, loghandler=None, query=None, paths=[import_dir]
+ )
# Library fixtures methods
@@ -301,18 +299,18 @@ def create_item(self, **values):
"""
item_count = self._get_item_count()
values_ = {
- 'title': 't\u00eftle {0}',
- 'artist': 'the \u00e4rtist',
- 'album': 'the \u00e4lbum',
- 'track': item_count,
- 'format': 'MP3',
+ "title": "t\u00eftle {0}",
+ "artist": "the \u00e4rtist",
+ "album": "the \u00e4lbum",
+ "track": item_count,
+ "format": "MP3",
}
values_.update(values)
- values_['title'] = values_['title'].format(item_count)
- values_['db'] = self.lib
+ values_["title"] = values_["title"].format(item_count)
+ values_["db"] = self.lib
item = Item(**values_)
- if 'path' not in values:
- item['path'] = 'audio.' + item['format'].lower()
+ if "path" not in values:
+ item["path"] = "audio." + item["format"].lower()
# mtime needs to be set last since other assignments reset it.
item.mtime = 12345
return item
@@ -326,26 +324,26 @@ def add_item(self, **values):
"""
# When specifying a path, store it normalized (as beets does
# ordinarily).
- if 'path' in values:
- values['path'] = util.normpath(values['path'])
+ if "path" in values:
+ values["path"] = util.normpath(values["path"])
item = self.create_item(**values)
item.add(self.lib)
# Ensure every item has a path.
- if 'path' not in values:
- item['path'] = item.destination()
+ if "path" not in values:
+ item["path"] = item.destination()
item.store()
return item
def add_item_fixture(self, **values):
- """Add an item with an actual audio file to the library.
- """
+ """Add an item with an actual audio file to the library."""
item = self.create_item(**values)
- extension = item['format'].lower()
- item['path'] = os.path.join(_common.RSRC,
- util.bytestring_path('min.' + extension))
+ extension = item["format"].lower()
+ item["path"] = os.path.join(
+ _common.RSRC, util.bytestring_path("min." + extension)
+ )
item.add(self.lib)
item.move(operation=MoveOperation.COPY)
item.store()
@@ -355,16 +353,15 @@ def add_album(self, **values):
item = self.add_item(**values)
return self.lib.add_album([item])
- def add_item_fixtures(self, ext='mp3', count=1):
- """Add a number of items with files to the database.
- """
+ def add_item_fixtures(self, ext="mp3", count=1):
+ """Add a number of items with files to the database."""
# TODO base this on `add_item()`
items = []
- path = os.path.join(_common.RSRC, util.bytestring_path('full.' + ext))
+ path = os.path.join(_common.RSRC, util.bytestring_path("full." + ext))
for i in range(count):
item = Item.from_path(path)
- item.album = f'\u00e4lbum {i}' # Check unicode paths
- item.title = f't\u00eftle {i}'
+ item.album = f"\u00e4lbum {i}" # Check unicode paths
+ item.title = f"t\u00eftle {i}"
# mtime needs to be set last since other assignments reset it.
item.mtime = 12345
item.add(self.lib)
@@ -376,22 +373,21 @@ def add_item_fixtures(self, ext='mp3', count=1):
def add_album_fixture(
self,
track_count=1,
- fname='full',
- ext='mp3',
+ fname="full",
+ ext="mp3",
disc_count=1,
):
- """Add an album with files to the database.
- """
+ """Add an album with files to the database."""
items = []
path = os.path.join(
_common.RSRC,
- util.bytestring_path(f'{fname}.{ext}'),
+ util.bytestring_path(f"{fname}.{ext}"),
)
for discnumber in range(1, disc_count + 1):
for i in range(track_count):
item = Item.from_path(path)
- item.album = '\u00e4lbum' # Check unicode paths
- item.title = f't\u00eftle {i}'
+ item.album = "\u00e4lbum" # Check unicode paths
+ item.title = f"t\u00eftle {i}"
item.disc = discnumber
# mtime needs to be set last since other assignments reset it.
item.mtime = 12345
@@ -401,7 +397,7 @@ def add_album_fixture(
items.append(item)
return self.lib.add_album(items)
- def create_mediafile_fixture(self, ext='mp3', images=[]):
+ def create_mediafile_fixture(self, ext="mp3", images=[]):
"""Copies a fixture mediafile with the extension to a temporary
location and returns the path.
@@ -412,7 +408,7 @@ def create_mediafile_fixture(self, ext='mp3', images=[]):
specified extension a cover art image is added to the media
file.
"""
- src = os.path.join(_common.RSRC, util.bytestring_path('full.' + ext))
+ src = os.path.join(_common.RSRC, util.bytestring_path("full." + ext))
handle, path = mkstemp()
path = bytestring_path(path)
os.close(handle)
@@ -422,26 +418,26 @@ def create_mediafile_fixture(self, ext='mp3', images=[]):
mediafile = MediaFile(path)
imgs = []
for img_ext in images:
- file = util.bytestring_path(f'image-2x3.{img_ext}')
+ file = util.bytestring_path(f"image-2x3.{img_ext}")
img_path = os.path.join(_common.RSRC, file)
- with open(img_path, 'rb') as f:
+ with open(img_path, "rb") as f:
imgs.append(Image(f.read()))
mediafile.images = imgs
mediafile.save()
- if not hasattr(self, '_mediafile_fixtures'):
+ if not hasattr(self, "_mediafile_fixtures"):
self._mediafile_fixtures = []
self._mediafile_fixtures.append(path)
return path
def remove_mediafile_fixtures(self):
- if hasattr(self, '_mediafile_fixtures'):
+ if hasattr(self, "_mediafile_fixtures"):
for path in self._mediafile_fixtures:
os.remove(syspath(path))
def _get_item_count(self):
- if not hasattr(self, '__item_count'):
+ if not hasattr(self, "__item_count"):
count = 0
self.__item_count = count + 1
return count
@@ -450,14 +446,14 @@ def _get_item_count(self):
def run_command(self, *args, **kwargs):
"""Run a beets command with an arbitrary amount of arguments. The
- Library` defaults to `self.lib`, but can be overridden with
- the keyword argument `lib`.
+        `Library` defaults to `self.lib`, but can be overridden with
+ the keyword argument `lib`.
"""
- sys.argv = ['beet'] # avoid leakage from test suite args
+ sys.argv = ["beet"] # avoid leakage from test suite args
lib = None
- if hasattr(self, 'lib'):
+ if hasattr(self, "lib"):
lib = self.lib
- lib = kwargs.get('lib', lib)
+ lib = kwargs.get("lib", lib)
beets.ui._raw_main(_convert_args(list(args)), lib)
def run_with_output(self, *args):
@@ -475,11 +471,10 @@ def create_temp_dir(self):
self.temp_dir = util.bytestring_path(temp_dir)
def remove_temp_dir(self):
- """Delete the temporary directory created by `create_temp_dir`.
- """
+ """Delete the temporary directory created by `create_temp_dir`."""
shutil.rmtree(syspath(self.temp_dir))
- def touch(self, path, dir=None, content=''):
+ def touch(self, path, dir=None, content=""):
"""Create a file at `path` with given content.
If `dir` is given, it is prepended to `path`. After that, if the
@@ -496,7 +491,7 @@ def touch(self, path, dir=None, content=''):
if not os.path.isdir(syspath(parent)):
os.makedirs(syspath(parent))
- with open(syspath(path), 'a+') as f:
+ with open(syspath(path), "a+") as f:
f.write(content)
return path
@@ -544,9 +539,9 @@ def choose_match(self, task):
choose_item = choose_match
- Resolution = Enum('Resolution', 'REMOVE SKIP KEEPBOTH MERGE')
+ Resolution = Enum("Resolution", "REMOVE SKIP KEEPBOTH MERGE")
- default_resolution = 'REMOVE'
+ default_resolution = "REMOVE"
def add_resolution(self, resolution):
assert isinstance(resolution, self.Resolution)
@@ -577,43 +572,65 @@ def generate_album_info(album_id, track_values):
"""
tracks = [generate_track_info(id, values) for id, values in track_values]
album = AlbumInfo(
- album_id='album info',
- album='album info',
- artist='album info',
- artist_id='album info',
+ album_id="album info",
+ album="album info",
+ artist="album info",
+ artist_id="album info",
tracks=tracks,
)
for field in ALBUM_INFO_FIELDS:
- setattr(album, field, 'album info')
+ setattr(album, field, "album info")
return album
-ALBUM_INFO_FIELDS = ['album', 'album_id', 'artist', 'artist_id',
- 'asin', 'albumtype', 'va', 'label',
- 'artist_sort', 'releasegroup_id', 'catalognum',
- 'language', 'country', 'albumstatus', 'media',
- 'albumdisambig', 'releasegroupdisambig', 'artist_credit',
- 'data_source', 'data_url']
-
-
-def generate_track_info(track_id='track info', values={}):
+ALBUM_INFO_FIELDS = [
+ "album",
+ "album_id",
+ "artist",
+ "artist_id",
+ "asin",
+ "albumtype",
+ "va",
+ "label",
+ "artist_sort",
+ "releasegroup_id",
+ "catalognum",
+ "language",
+ "country",
+ "albumstatus",
+ "media",
+ "albumdisambig",
+ "releasegroupdisambig",
+ "artist_credit",
+ "data_source",
+ "data_url",
+]
+
+
+def generate_track_info(track_id="track info", values={}):
"""Return `TrackInfo` populated with mock data.
The `track_id` field is set to the corresponding argument. All other
string fields are set to "track info".
"""
track = TrackInfo(
- title='track info',
+ title="track info",
track_id=track_id,
)
for field in TRACK_INFO_FIELDS:
- setattr(track, field, 'track info')
+ setattr(track, field, "track info")
for field, value in values.items():
setattr(track, field, value)
return track
-TRACK_INFO_FIELDS = ['artist', 'artist_id', 'artist_sort',
- 'disctitle', 'artist_credit', 'data_source',
- 'data_url']
+TRACK_INFO_FIELDS = [
+ "artist",
+ "artist_id",
+ "artist_sort",
+ "disctitle",
+ "artist_credit",
+ "data_source",
+ "data_url",
+]
diff --git a/test/plugins/lyrics_download_samples.py b/test/plugins/lyrics_download_samples.py
index 1124880872..a59e046446 100644
--- a/test/plugins/lyrics_download_samples.py
+++ b/test/plugins/lyrics_download_samples.py
@@ -15,10 +15,10 @@
import os
import sys
-import requests
-
from test.plugins import test_lyrics
+import requests
+
def mkdir_p(path):
try:
@@ -31,26 +31,25 @@ def mkdir_p(path):
def safe_open_w(path):
- """Open "path" for writing, creating any parent directories as needed.
- """
+ """Open "path" for writing, creating any parent directories as needed."""
mkdir_p(os.path.dirname(path))
- return open(path, 'w')
+ return open(path, "w")
def main(argv=None):
- """Download one lyrics sample page per referenced source.
- """
+ """Download one lyrics sample page per referenced source."""
if argv is None:
argv = sys.argv
- print('Fetching samples from:')
+ print("Fetching samples from:")
for s in test_lyrics.GOOGLE_SOURCES + test_lyrics.DEFAULT_SOURCES:
- print(s['url'])
- url = s['url'] + s['path']
+ print(s["url"])
+ url = s["url"] + s["path"]
fn = test_lyrics.url_to_filename(url)
if not os.path.isfile(fn):
html = requests.get(url, verify=False).text
with safe_open_w(fn) as f:
- f.write(html.encode('utf-8'))
+ f.write(html.encode("utf-8"))
+
if __name__ == "__main__":
sys.exit(main())
diff --git a/test/plugins/test_acousticbrainz.py b/test/plugins/test_acousticbrainz.py
index 880f554384..76167accc0 100644
--- a/test/plugins/test_acousticbrainz.py
+++ b/test/plugins/test_acousticbrainz.py
@@ -19,83 +19,84 @@
import json
import os.path
import unittest
-
from test._common import RSRC
-from beetsplug.acousticbrainz import AcousticPlugin, ABSCHEME
+from beetsplug.acousticbrainz import ABSCHEME, AcousticPlugin
class MapDataToSchemeTest(unittest.TestCase):
def test_basic(self):
ab = AcousticPlugin()
- data = {'key 1': 'value 1', 'key 2': 'value 2'}
- scheme = {'key 1': 'attribute 1', 'key 2': 'attribute 2'}
+ data = {"key 1": "value 1", "key 2": "value 2"}
+ scheme = {"key 1": "attribute 1", "key 2": "attribute 2"}
mapping = set(ab._map_data_to_scheme(data, scheme))
- self.assertEqual(mapping, {('attribute 1', 'value 1'),
- ('attribute 2', 'value 2')})
+ self.assertEqual(
+ mapping, {("attribute 1", "value 1"), ("attribute 2", "value 2")}
+ )
def test_recurse(self):
ab = AcousticPlugin()
data = {
- 'key': 'value',
- 'group': {
- 'subkey': 'subvalue',
- 'subgroup': {
- 'subsubkey': 'subsubvalue'
- }
- }
+ "key": "value",
+ "group": {
+ "subkey": "subvalue",
+ "subgroup": {"subsubkey": "subsubvalue"},
+ },
}
scheme = {
- 'key': 'attribute 1',
- 'group': {
- 'subkey': 'attribute 2',
- 'subgroup': {
- 'subsubkey': 'attribute 3'
- }
- }
+ "key": "attribute 1",
+ "group": {
+ "subkey": "attribute 2",
+ "subgroup": {"subsubkey": "attribute 3"},
+ },
}
mapping = set(ab._map_data_to_scheme(data, scheme))
- self.assertEqual(mapping, {('attribute 1', 'value'),
- ('attribute 2', 'subvalue'),
- ('attribute 3', 'subsubvalue')})
+ self.assertEqual(
+ mapping,
+ {
+ ("attribute 1", "value"),
+ ("attribute 2", "subvalue"),
+ ("attribute 3", "subsubvalue"),
+ },
+ )
def test_composite(self):
ab = AcousticPlugin()
- data = {'key 1': 'part 1', 'key 2': 'part 2'}
- scheme = {'key 1': ('attribute', 0), 'key 2': ('attribute', 1)}
+ data = {"key 1": "part 1", "key 2": "part 2"}
+ scheme = {"key 1": ("attribute", 0), "key 2": ("attribute", 1)}
mapping = set(ab._map_data_to_scheme(data, scheme))
- self.assertEqual(mapping, {('attribute', 'part 1 part 2')})
+ self.assertEqual(mapping, {("attribute", "part 1 part 2")})
def test_realistic(self):
ab = AcousticPlugin()
- data_path = os.path.join(RSRC, b'acousticbrainz/data.json')
+ data_path = os.path.join(RSRC, b"acousticbrainz/data.json")
with open(data_path) as res:
data = json.load(res)
mapping = set(ab._map_data_to_scheme(data, ABSCHEME))
expected = {
- ('chords_key', 'A'),
- ('average_loudness', 0.815025985241),
- ('mood_acoustic', 0.415711194277),
- ('chords_changes_rate', 0.0445116683841),
- ('tonal', 0.874250173569),
- ('mood_sad', 0.299694597721),
- ('bpm', 162.532119751),
- ('gender', 'female'),
- ('initial_key', 'A minor'),
- ('chords_number_rate', 0.00194468453992),
- ('mood_relaxed', 0.123632438481),
- ('chords_scale', 'minor'),
- ('voice_instrumental', 'instrumental'),
- ('key_strength', 0.636936545372),
- ('genre_rosamerica', 'roc'),
- ('mood_party', 0.234383180737),
- ('mood_aggressive', 0.0779221653938),
- ('danceable', 0.143928021193),
- ('rhythm', 'VienneseWaltz'),
- ('mood_electronic', 0.339881360531),
- ('mood_happy', 0.0894767045975),
- ('moods_mirex', "Cluster3"),
- ('timbre', "bright")
+ ("chords_key", "A"),
+ ("average_loudness", 0.815025985241),
+ ("mood_acoustic", 0.415711194277),
+ ("chords_changes_rate", 0.0445116683841),
+ ("tonal", 0.874250173569),
+ ("mood_sad", 0.299694597721),
+ ("bpm", 162.532119751),
+ ("gender", "female"),
+ ("initial_key", "A minor"),
+ ("chords_number_rate", 0.00194468453992),
+ ("mood_relaxed", 0.123632438481),
+ ("chords_scale", "minor"),
+ ("voice_instrumental", "instrumental"),
+ ("key_strength", 0.636936545372),
+ ("genre_rosamerica", "roc"),
+ ("mood_party", 0.234383180737),
+ ("mood_aggressive", 0.0779221653938),
+ ("danceable", 0.143928021193),
+ ("rhythm", "VienneseWaltz"),
+ ("mood_electronic", 0.339881360531),
+ ("mood_happy", 0.0894767045975),
+ ("moods_mirex", "Cluster3"),
+ ("timbre", "bright"),
}
self.assertEqual(mapping, expected)
@@ -103,5 +104,6 @@ def test_realistic(self):
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/plugins/test_albumtypes.py b/test/plugins/test_albumtypes.py
index 3d329dd7b8..d50e2263c7 100644
--- a/test/plugins/test_albumtypes.py
+++ b/test/plugins/test_albumtypes.py
@@ -16,10 +16,10 @@
import unittest
+from test.helper import TestHelper
from beets.autotag.mb import VARIOUS_ARTISTS_ID
from beetsplug.albumtypes import AlbumTypesPlugin
-from test.helper import TestHelper
class AlbumTypesPluginTest(unittest.TestCase, TestHelper):
@@ -28,7 +28,7 @@ class AlbumTypesPluginTest(unittest.TestCase, TestHelper):
def setUp(self):
"""Set up tests."""
self.setup_beets()
- self.load_plugins('albumtypes')
+ self.load_plugins("albumtypes")
def tearDown(self):
"""Tear down tests."""
@@ -38,74 +38,72 @@ def tearDown(self):
def test_renames_types(self):
"""Tests if the plugin correctly renames the specified types."""
self._set_config(
- types=[('ep', 'EP'), ('remix', 'Remix')],
- ignore_va=[],
- bracket='()'
+ types=[("ep", "EP"), ("remix", "Remix")], ignore_va=[], bracket="()"
)
- album = self._create_album(album_types=['ep', 'remix'])
+ album = self._create_album(album_types=["ep", "remix"])
subject = AlbumTypesPlugin()
result = subject._atypes(album)
- self.assertEqual('(EP)(Remix)', result)
+ self.assertEqual("(EP)(Remix)", result)
return
def test_returns_only_specified_types(self):
"""Tests if the plugin returns only non-blank types given in config."""
self._set_config(
- types=[('ep', 'EP'), ('soundtrack', '')],
- ignore_va=[],
- bracket='()'
+ types=[("ep", "EP"), ("soundtrack", "")], ignore_va=[], bracket="()"
)
- album = self._create_album(album_types=['ep', 'remix', 'soundtrack'])
+ album = self._create_album(album_types=["ep", "remix", "soundtrack"])
subject = AlbumTypesPlugin()
result = subject._atypes(album)
- self.assertEqual('(EP)', result)
+ self.assertEqual("(EP)", result)
def test_respects_type_order(self):
"""Tests if the types are returned in the same order as config."""
self._set_config(
- types=[('remix', 'Remix'), ('ep', 'EP')],
- ignore_va=[],
- bracket='()'
+ types=[("remix", "Remix"), ("ep", "EP")], ignore_va=[], bracket="()"
)
- album = self._create_album(album_types=['ep', 'remix'])
+ album = self._create_album(album_types=["ep", "remix"])
subject = AlbumTypesPlugin()
result = subject._atypes(album)
- self.assertEqual('(Remix)(EP)', result)
+ self.assertEqual("(Remix)(EP)", result)
return
def test_ignores_va(self):
"""Tests if the specified type is ignored for VA albums."""
self._set_config(
- types=[('ep', 'EP'), ('soundtrack', 'OST')],
- ignore_va=['ep'],
- bracket='()'
+ types=[("ep", "EP"), ("soundtrack", "OST")],
+ ignore_va=["ep"],
+ bracket="()",
)
album = self._create_album(
- album_types=['ep', 'soundtrack'],
- artist_id=VARIOUS_ARTISTS_ID
+ album_types=["ep", "soundtrack"], artist_id=VARIOUS_ARTISTS_ID
)
subject = AlbumTypesPlugin()
result = subject._atypes(album)
- self.assertEqual('(OST)', result)
+ self.assertEqual("(OST)", result)
def test_respects_defaults(self):
"""Tests if the plugin uses the default values if config not given."""
album = self._create_album(
- album_types=['ep', 'single', 'soundtrack', 'live', 'compilation',
- 'remix'],
- artist_id=VARIOUS_ARTISTS_ID
+ album_types=[
+ "ep",
+ "single",
+ "soundtrack",
+ "live",
+ "compilation",
+ "remix",
+ ],
+ artist_id=VARIOUS_ARTISTS_ID,
)
subject = AlbumTypesPlugin()
result = subject._atypes(album)
- self.assertEqual('[EP][Single][OST][Live][Remix]', result)
+ self.assertEqual("[EP][Single][OST][Live][Remix]", result)
def _set_config(self, types: [(str, str)], ignore_va: [str], bracket: str):
- self.config['albumtypes']['types'] = types
- self.config['albumtypes']['ignore_va'] = ignore_va
- self.config['albumtypes']['bracket'] = bracket
+ self.config["albumtypes"]["types"] = types
+ self.config["albumtypes"]["ignore_va"] = ignore_va
+ self.config["albumtypes"]["bracket"] = bracket
def _create_album(self, album_types: [str], artist_id: str = 0):
return self.add_album(
- albumtypes=album_types,
- mb_albumartistid=artist_id
+ albumtypes=album_types, mb_albumartistid=artist_id
)
diff --git a/test/plugins/test_art.py b/test/plugins/test_art.py
index 62b7393a49..a691fa6021 100644
--- a/test/plugins/test_art.py
+++ b/test/plugins/test_art.py
@@ -18,31 +18,27 @@
import os
import shutil
import unittest
+from test import _common
+from test.helper import capture_log
+from unittest.mock import patch
+import confuse
import responses
-from unittest.mock import patch
-from test import _common
-from test.helper import capture_log
-from beetsplug import fetchart
+from beets import config, importer, library, logging, util
from beets.autotag import AlbumInfo, AlbumMatch
-from beets import config
-from beets import library
-from beets import importer
-from beets import logging
-from beets import util
from beets.util import syspath
from beets.util.artresizer import ArtResizer
-import confuse
-
+from beetsplug import fetchart
-logger = logging.getLogger('beets.test_art')
+logger = logging.getLogger("beets.test_art")
-class Settings():
+class Settings:
"""Used to pass settings to the ArtSources when the plugin isn't fully
instantiated.
"""
+
def __init__(self, **kwargs):
for k, v in kwargs.items():
setattr(self, k, v)
@@ -58,32 +54,36 @@ class FetchImageHelper(_common.TestCase):
"""Helper mixin for mocking requests when fetching images
with remote art sources.
"""
+
@responses.activate
def run(self, *args, **kwargs):
super().run(*args, **kwargs)
- IMAGEHEADER = {'image/jpeg': b'\x00' * 6 + b'JFIF',
- 'image/png': b'\211PNG\r\n\032\n', }
+ IMAGEHEADER = {
+ "image/jpeg": b"\x00" * 6 + b"JFIF",
+ "image/png": b"\211PNG\r\n\032\n",
+ }
- def mock_response(self, url, content_type='image/jpeg', file_type=None):
+ def mock_response(self, url, content_type="image/jpeg", file_type=None):
if file_type is None:
file_type = content_type
- responses.add(responses.GET, url,
- content_type=content_type,
- # imghdr reads 32 bytes
- body=self.IMAGEHEADER.get(
- file_type, b'').ljust(32, b'\x00'))
+ responses.add(
+ responses.GET,
+ url,
+ content_type=content_type,
+ # imghdr reads 32 bytes
+ body=self.IMAGEHEADER.get(file_type, b"").ljust(32, b"\x00"),
+ )
-class CAAHelper():
+class CAAHelper:
"""Helper mixin for mocking requests to the Cover Art Archive."""
- MBID_RELASE = 'rid'
- MBID_GROUP = 'rgid'
- RELEASE_URL = 'coverartarchive.org/release/{}' \
- .format(MBID_RELASE)
- GROUP_URL = 'coverartarchive.org/release-group/{}' \
- .format(MBID_GROUP)
+ MBID_RELASE = "rid"
+ MBID_GROUP = "rgid"
+
+ RELEASE_URL = "coverartarchive.org/release/{}".format(MBID_RELASE)
+ GROUP_URL = "coverartarchive.org/release-group/{}".format(MBID_GROUP)
RELEASE_URL = "https://" + RELEASE_URL
GROUP_URL = "https://" + GROUP_URL
@@ -207,71 +207,71 @@ class CAAHelper():
}"""
def mock_caa_response(self, url, json):
- responses.add(responses.GET, url, body=json,
- content_type='application/json')
+ responses.add(
+ responses.GET, url, body=json, content_type="application/json"
+ )
class FetchImageTest(FetchImageHelper, UseThePlugin):
- URL = 'http://example.com/test.jpg'
+ URL = "http://example.com/test.jpg"
def setUp(self):
super().setUp()
- self.dpath = os.path.join(self.temp_dir, b'arttest')
+ self.dpath = os.path.join(self.temp_dir, b"arttest")
self.source = fetchart.RemoteArtSource(logger, self.plugin.config)
self.settings = Settings(maxwidth=0)
self.candidate = fetchart.Candidate(logger, url=self.URL)
def test_invalid_type_returns_none(self):
- self.mock_response(self.URL, 'image/watercolour')
+ self.mock_response(self.URL, "image/watercolour")
self.source.fetch_image(self.candidate, self.settings)
self.assertEqual(self.candidate.path, None)
def test_jpeg_type_returns_path(self):
- self.mock_response(self.URL, 'image/jpeg')
+ self.mock_response(self.URL, "image/jpeg")
self.source.fetch_image(self.candidate, self.settings)
self.assertNotEqual(self.candidate.path, None)
def test_extension_set_by_content_type(self):
- self.mock_response(self.URL, 'image/png')
+ self.mock_response(self.URL, "image/png")
self.source.fetch_image(self.candidate, self.settings)
- self.assertEqual(os.path.splitext(self.candidate.path)[1], b'.png')
+ self.assertEqual(os.path.splitext(self.candidate.path)[1], b".png")
self.assertExists(self.candidate.path)
def test_does_not_rely_on_server_content_type(self):
- self.mock_response(self.URL, 'image/jpeg', 'image/png')
+ self.mock_response(self.URL, "image/jpeg", "image/png")
self.source.fetch_image(self.candidate, self.settings)
- self.assertEqual(os.path.splitext(self.candidate.path)[1], b'.png')
+ self.assertEqual(os.path.splitext(self.candidate.path)[1], b".png")
self.assertExists(self.candidate.path)
class FSArtTest(UseThePlugin):
def setUp(self):
super().setUp()
- self.dpath = os.path.join(self.temp_dir, b'arttest')
+ self.dpath = os.path.join(self.temp_dir, b"arttest")
os.mkdir(syspath(self.dpath))
self.source = fetchart.FileSystem(logger, self.plugin.config)
- self.settings = Settings(cautious=False,
- cover_names=('art',))
+ self.settings = Settings(cautious=False, cover_names=("art",))
def test_finds_jpg_in_directory(self):
- _common.touch(os.path.join(self.dpath, b'a.jpg'))
+ _common.touch(os.path.join(self.dpath, b"a.jpg"))
candidate = next(self.source.get(None, self.settings, [self.dpath]))
- self.assertEqual(candidate.path, os.path.join(self.dpath, b'a.jpg'))
+ self.assertEqual(candidate.path, os.path.join(self.dpath, b"a.jpg"))
def test_appropriately_named_file_takes_precedence(self):
- _common.touch(os.path.join(self.dpath, b'a.jpg'))
- _common.touch(os.path.join(self.dpath, b'art.jpg'))
+ _common.touch(os.path.join(self.dpath, b"a.jpg"))
+ _common.touch(os.path.join(self.dpath, b"art.jpg"))
candidate = next(self.source.get(None, self.settings, [self.dpath]))
- self.assertEqual(candidate.path, os.path.join(self.dpath, b'art.jpg'))
+ self.assertEqual(candidate.path, os.path.join(self.dpath, b"art.jpg"))
def test_non_image_file_not_identified(self):
- _common.touch(os.path.join(self.dpath, b'a.txt'))
+ _common.touch(os.path.join(self.dpath, b"a.txt"))
with self.assertRaises(StopIteration):
next(self.source.get(None, self.settings, [self.dpath]))
def test_cautious_skips_fallback(self):
- _common.touch(os.path.join(self.dpath, b'a.jpg'))
+ _common.touch(os.path.join(self.dpath, b"a.jpg"))
self.settings.cautious = True
with self.assertRaises(StopIteration):
next(self.source.get(None, self.settings, [self.dpath]))
@@ -281,27 +281,29 @@ def test_empty_dir(self):
next(self.source.get(None, self.settings, [self.dpath]))
def test_precedence_amongst_correct_files(self):
- images = [b'front-cover.jpg', b'front.jpg', b'back.jpg']
+ images = [b"front-cover.jpg", b"front.jpg", b"back.jpg"]
paths = [os.path.join(self.dpath, i) for i in images]
for p in paths:
_common.touch(p)
- self.settings.cover_names = ['cover', 'front', 'back']
- candidates = [candidate.path for candidate in
- self.source.get(None, self.settings, [self.dpath])]
+ self.settings.cover_names = ["cover", "front", "back"]
+ candidates = [
+ candidate.path
+ for candidate in self.source.get(None, self.settings, [self.dpath])
+ ]
self.assertEqual(candidates, paths)
class CombinedTest(FetchImageHelper, UseThePlugin, CAAHelper):
- ASIN = 'xxxx'
- MBID = 'releaseid'
- AMAZON_URL = 'https://images.amazon.com/images/P/{}.01.LZZZZZZZ.jpg' \
- .format(ASIN)
- AAO_URL = 'https://www.albumart.org/index_detail.php?asin={}' \
- .format(ASIN)
+ ASIN = "xxxx"
+ MBID = "releaseid"
+ AMAZON_URL = "https://images.amazon.com/images/P/{}.01.LZZZZZZZ.jpg".format(
+ ASIN
+ )
+ AAO_URL = "https://www.albumart.org/index_detail.php?asin={}".format(ASIN)
def setUp(self):
super().setUp()
- self.dpath = os.path.join(self.temp_dir, b'arttest')
+ self.dpath = os.path.join(self.temp_dir, b"arttest")
os.mkdir(syspath(self.dpath))
def test_main_interface_returns_amazon_art(self):
@@ -316,12 +318,12 @@ def test_main_interface_returns_none_for_missing_asin_and_path(self):
self.assertIsNone(candidate)
def test_main_interface_gives_precedence_to_fs_art(self):
- _common.touch(os.path.join(self.dpath, b'art.jpg'))
+ _common.touch(os.path.join(self.dpath, b"art.jpg"))
self.mock_response(self.AMAZON_URL)
album = _common.Bag(asin=self.ASIN)
candidate = self.plugin.art_for_album(album, [self.dpath])
self.assertIsNotNone(candidate)
- self.assertEqual(candidate.path, os.path.join(self.dpath, b'art.jpg'))
+ self.assertEqual(candidate.path, os.path.join(self.dpath, b"art.jpg"))
def test_main_interface_falls_back_to_amazon(self):
self.mock_response(self.AMAZON_URL)
@@ -338,7 +340,7 @@ def test_main_interface_tries_amazon_before_aao(self):
self.assertEqual(responses.calls[0].request.url, self.AMAZON_URL)
def test_main_interface_falls_back_to_aao(self):
- self.mock_response(self.AMAZON_URL, content_type='text/html')
+ self.mock_response(self.AMAZON_URL, content_type="text/html")
album = _common.Bag(asin=self.ASIN)
self.plugin.art_for_album(album, [self.dpath])
self.assertEqual(responses.calls[-1].request.url, self.AAO_URL)
@@ -346,13 +348,19 @@ def test_main_interface_falls_back_to_aao(self):
def test_main_interface_uses_caa_when_mbid_available(self):
self.mock_caa_response(self.RELEASE_URL, self.RESPONSE_RELEASE)
self.mock_caa_response(self.GROUP_URL, self.RESPONSE_GROUP)
- self.mock_response('http://coverartarchive.org/release/rid/12345.gif',
- content_type='image/gif')
- self.mock_response('http://coverartarchive.org/release/rid/12345.jpg',
- content_type='image/jpeg')
- album = _common.Bag(mb_albumid=self.MBID_RELASE,
- mb_releasegroupid=self.MBID_GROUP,
- asin=self.ASIN)
+ self.mock_response(
+ "http://coverartarchive.org/release/rid/12345.gif",
+ content_type="image/gif",
+ )
+ self.mock_response(
+ "http://coverartarchive.org/release/rid/12345.jpg",
+ content_type="image/jpeg",
+ )
+ album = _common.Bag(
+ mb_albumid=self.MBID_RELASE,
+ mb_releasegroupid=self.MBID_GROUP,
+ asin=self.ASIN,
+ )
candidate = self.plugin.art_for_album(album, None)
self.assertIsNotNone(candidate)
self.assertEqual(len(responses.calls), 3)
@@ -364,18 +372,19 @@ def test_local_only_does_not_access_network(self):
self.assertEqual(len(responses.calls), 0)
def test_local_only_gets_fs_image(self):
- _common.touch(os.path.join(self.dpath, b'art.jpg'))
+ _common.touch(os.path.join(self.dpath, b"art.jpg"))
album = _common.Bag(mb_albumid=self.MBID, asin=self.ASIN)
- candidate = self.plugin.art_for_album(album, [self.dpath],
- local_only=True)
+ candidate = self.plugin.art_for_album(
+ album, [self.dpath], local_only=True
+ )
self.assertIsNotNone(candidate)
- self.assertEqual(candidate.path, os.path.join(self.dpath, b'art.jpg'))
+ self.assertEqual(candidate.path, os.path.join(self.dpath, b"art.jpg"))
self.assertEqual(len(responses.calls), 0)
class AAOTest(UseThePlugin):
- ASIN = 'xxxx'
- AAO_URL = f'https://www.albumart.org/index_detail.php?asin={ASIN}'
+ ASIN = "xxxx"
+ AAO_URL = f"https://www.albumart.org/index_detail.php?asin={ASIN}"
def setUp(self):
super().setUp()
@@ -387,7 +396,7 @@ def run(self, *args, **kwargs):
super().run(*args, **kwargs)
def mock_response(self, url, body):
- responses.add(responses.GET, url, body=body, content_type='text/html')
+ responses.add(responses.GET, url, body=body, content_type="text/html")
def test_aao_scraper_finds_image(self):
body = """
@@ -400,10 +409,10 @@ def test_aao_scraper_finds_image(self):
self.mock_response(self.AAO_URL, body)
album = _common.Bag(asin=self.ASIN)
candidate = next(self.source.get(album, self.settings, []))
- self.assertEqual(candidate.url, 'TARGET_URL')
+ self.assertEqual(candidate.url, "TARGET_URL")
def test_aao_scraper_returns_no_result_when_no_image_present(self):
- self.mock_response(self.AAO_URL, 'blah blah')
+ self.mock_response(self.AAO_URL, "blah blah")
album = _common.Bag(asin=self.ASIN)
with self.assertRaises(StopIteration):
next(self.source.get(album, self.settings, []))
@@ -421,8 +430,9 @@ def run(self, *args, **kwargs):
super().run(*args, **kwargs)
def mock_response(self, url, json):
- responses.add(responses.GET, url, body=json,
- content_type='application/json')
+ responses.add(
+ responses.GET, url, body=json, content_type="application/json"
+ )
def test_itunesstore_finds_image(self):
json = """{
@@ -437,7 +447,7 @@ def test_itunesstore_finds_image(self):
}"""
self.mock_response(fetchart.ITunesStore.API_URL, json)
candidate = next(self.source.get(self.album, self.settings, []))
- self.assertEqual(candidate.url, 'url_to_the_image')
+ self.assertEqual(candidate.url, "url_to_the_image")
self.assertEqual(candidate.match, fetchart.Candidate.MATCH_EXACT)
def test_itunesstore_no_result(self):
@@ -445,17 +455,21 @@ def test_itunesstore_no_result(self):
self.mock_response(fetchart.ITunesStore.API_URL, json)
expected = "got no results"
- with capture_log('beets.test_art') as logs:
+ with capture_log("beets.test_art") as logs:
with self.assertRaises(StopIteration):
next(self.source.get(self.album, self.settings, []))
self.assertIn(expected, logs[1])
def test_itunesstore_requestexception(self):
- responses.add(responses.GET, fetchart.ITunesStore.API_URL,
- json={'error': 'not found'}, status=404)
- expected = 'iTunes search failed: 404 Client Error'
+ responses.add(
+ responses.GET,
+ fetchart.ITunesStore.API_URL,
+ json={"error": "not found"},
+ status=404,
+ )
+ expected = "iTunes search failed: 404 Client Error"
- with capture_log('beets.test_art') as logs:
+ with capture_log("beets.test_art") as logs:
with self.assertRaises(StopIteration):
next(self.source.get(self.album, self.settings, []))
self.assertIn(expected, logs[1])
@@ -472,7 +486,7 @@ def test_itunesstore_fallback_match(self):
}"""
self.mock_response(fetchart.ITunesStore.API_URL, json)
candidate = next(self.source.get(self.album, self.settings, []))
- self.assertEqual(candidate.url, 'url_to_the_image')
+ self.assertEqual(candidate.url, "url_to_the_image")
self.assertEqual(candidate.match, fetchart.Candidate.MATCH_FALLBACK)
def test_itunesstore_returns_result_without_artwork(self):
@@ -486,9 +500,9 @@ def test_itunesstore_returns_result_without_artwork(self):
]
}"""
self.mock_response(fetchart.ITunesStore.API_URL, json)
- expected = 'Malformed itunes candidate'
+ expected = "Malformed itunes candidate"
- with capture_log('beets.test_art') as logs:
+ with capture_log("beets.test_art") as logs:
with self.assertRaises(StopIteration):
next(self.source.get(self.album, self.settings, []))
self.assertIn(expected, logs[1])
@@ -498,7 +512,7 @@ def test_itunesstore_returns_no_result_when_error_received(self):
self.mock_response(fetchart.ITunesStore.API_URL, json)
expected = "not found in json. Fields are"
- with capture_log('beets.test_art') as logs:
+ with capture_log("beets.test_art") as logs:
with self.assertRaises(StopIteration):
next(self.source.get(self.album, self.settings, []))
self.assertIn(expected, logs[1])
@@ -508,7 +522,7 @@ def test_itunesstore_returns_no_result_with_malformed_response(self):
self.mock_response(fetchart.ITunesStore.API_URL, json)
expected = "Could not decode json response:"
- with capture_log('beets.test_art') as logs:
+ with capture_log("beets.test_art") as logs:
with self.assertRaises(StopIteration):
next(self.source.get(self.album, self.settings, []))
self.assertIn(expected, logs[1])
@@ -525,15 +539,16 @@ def run(self, *args, **kwargs):
super().run(*args, **kwargs)
def mock_response(self, url, json):
- responses.add(responses.GET, url, body=json,
- content_type='application/json')
+ responses.add(
+ responses.GET, url, body=json, content_type="application/json"
+ )
def test_google_art_finds_image(self):
album = _common.Bag(albumartist="some artist", album="some album")
json = '{"items": [{"link": "url_to_the_image"}]}'
self.mock_response(fetchart.GoogleImages.URL, json)
candidate = next(self.source.get(album, self.settings, []))
- self.assertEqual(candidate.url, 'url_to_the_image')
+ self.assertEqual(candidate.url, "url_to_the_image")
def test_google_art_returns_no_result_when_error_received(self):
album = _common.Bag(albumartist="some artist", album="some album")
@@ -551,7 +566,6 @@ def test_google_art_returns_no_result_with_malformed_response(self):
class CoverArtArchiveTest(UseThePlugin, CAAHelper):
-
def setUp(self):
super().setUp()
self.source = fetchart.CoverArtArchive(logger, self.plugin.config)
@@ -562,8 +576,9 @@ def run(self, *args, **kwargs):
super().run(*args, **kwargs)
def test_caa_finds_image(self):
- album = _common.Bag(mb_albumid=self.MBID_RELASE,
- mb_releasegroupid=self.MBID_GROUP)
+ album = _common.Bag(
+ mb_albumid=self.MBID_RELASE, mb_releasegroupid=self.MBID_GROUP
+ )
self.mock_caa_response(self.RELEASE_URL, self.RESPONSE_RELEASE)
self.mock_caa_response(self.GROUP_URL, self.RESPONSE_GROUP)
candidates = list(self.source.get(album, self.settings, []))
@@ -600,7 +615,8 @@ def test_caa_finds_image_if_maxwidth_is_set_and_thumbnails_is_empty(self):
self.RELEASE_URL, self.RESPONSE_RELEASE_WITHOUT_THUMBNAILS
)
self.mock_caa_response(
- self.GROUP_URL, self.RESPONSE_GROUP_WITHOUT_THUMBNAILS,
+ self.GROUP_URL,
+ self.RESPONSE_GROUP_WITHOUT_THUMBNAILS,
)
candidates = list(self.source.get(album, self.settings, []))
self.assertEqual(len(candidates), 3)
@@ -676,35 +692,44 @@ def run(self, *args, **kwargs):
super().run(*args, **kwargs)
def mock_response(self, url, json):
- responses.add(responses.GET, url, body=json,
- content_type='application/json')
+ responses.add(
+ responses.GET, url, body=json, content_type="application/json"
+ )
def test_fanarttv_finds_image(self):
- album = _common.Bag(mb_releasegroupid='thereleasegroupid')
- self.mock_response(fetchart.FanartTV.API_ALBUMS + 'thereleasegroupid',
- self.RESPONSE_MULTIPLE)
+ album = _common.Bag(mb_releasegroupid="thereleasegroupid")
+ self.mock_response(
+ fetchart.FanartTV.API_ALBUMS + "thereleasegroupid",
+ self.RESPONSE_MULTIPLE,
+ )
candidate = next(self.source.get(album, self.settings, []))
- self.assertEqual(candidate.url, 'http://example.com/1.jpg')
+ self.assertEqual(candidate.url, "http://example.com/1.jpg")
def test_fanarttv_returns_no_result_when_error_received(self):
- album = _common.Bag(mb_releasegroupid='thereleasegroupid')
- self.mock_response(fetchart.FanartTV.API_ALBUMS + 'thereleasegroupid',
- self.RESPONSE_ERROR)
+ album = _common.Bag(mb_releasegroupid="thereleasegroupid")
+ self.mock_response(
+ fetchart.FanartTV.API_ALBUMS + "thereleasegroupid",
+ self.RESPONSE_ERROR,
+ )
with self.assertRaises(StopIteration):
next(self.source.get(album, self.settings, []))
def test_fanarttv_returns_no_result_with_malformed_response(self):
- album = _common.Bag(mb_releasegroupid='thereleasegroupid')
- self.mock_response(fetchart.FanartTV.API_ALBUMS + 'thereleasegroupid',
- self.RESPONSE_MALFORMED)
+ album = _common.Bag(mb_releasegroupid="thereleasegroupid")
+ self.mock_response(
+ fetchart.FanartTV.API_ALBUMS + "thereleasegroupid",
+ self.RESPONSE_MALFORMED,
+ )
with self.assertRaises(StopIteration):
next(self.source.get(album, self.settings, []))
def test_fanarttv_only_other_images(self):
# The source used to fail when there were images present, but no cover
- album = _common.Bag(mb_releasegroupid='thereleasegroupid')
- self.mock_response(fetchart.FanartTV.API_ALBUMS + 'thereleasegroupid',
- self.RESPONSE_NO_ART)
+ album = _common.Bag(mb_releasegroupid="thereleasegroupid")
+ self.mock_response(
+ fetchart.FanartTV.API_ALBUMS + "thereleasegroupid",
+ self.RESPONSE_NO_ART,
+ )
with self.assertRaises(StopIteration):
next(self.source.get(album, self.settings, []))
@@ -715,7 +740,7 @@ def setUp(self):
super().setUp()
# Mock the album art fetcher to always return our test file.
- self.art_file = os.path.join(self.temp_dir, b'tmpcover.jpg')
+ self.art_file = os.path.join(self.temp_dir, b"tmpcover.jpg")
_common.touch(self.art_file)
self.old_afa = self.plugin.art_for_album
self.afa_response = fetchart.Candidate(logger, path=self.art_file)
@@ -726,13 +751,13 @@ def art_for_album(i, p, local_only=False):
self.plugin.art_for_album = art_for_album
# Test library.
- self.libpath = os.path.join(self.temp_dir, b'tmplib.blb')
- self.libdir = os.path.join(self.temp_dir, b'tmplib')
+ self.libpath = os.path.join(self.temp_dir, b"tmplib.blb")
+ self.libdir = os.path.join(self.temp_dir, b"tmplib")
os.mkdir(syspath(self.libdir))
- os.mkdir(syspath(os.path.join(self.libdir, b'album')))
- itempath = os.path.join(self.libdir, b'album', b'test.mp3')
+ os.mkdir(syspath(os.path.join(self.libdir, b"album")))
+ itempath = os.path.join(self.libdir, b"album", b"test.mp3")
shutil.copyfile(
- syspath(os.path.join(_common.RSRC, b'full.mp3')),
+ syspath(os.path.join(_common.RSRC, b"full.mp3")),
syspath(itempath),
)
self.lib = library.Library(self.libpath)
@@ -749,10 +774,10 @@ def art_for_album(i, p, local_only=False):
self.task.is_album = True
self.task.album = self.album
info = AlbumInfo(
- album='some album',
- album_id='albumid',
- artist='some artist',
- artist_id='artistid',
+ album="some album",
+ album_id="albumid",
+ artist="some artist",
+ artist_id="artistid",
tracks=[],
)
self.task.set_choice(AlbumMatch(0, info, {}, set(), set()))
@@ -776,7 +801,7 @@ def _fetch_art(self, should_exist):
if should_exist:
self.assertEqual(
artpath,
- os.path.join(os.path.dirname(self.i.path), b'cover.jpg')
+ os.path.join(os.path.dirname(self.i.path), b"cover.jpg"),
)
self.assertExists(artpath)
else:
@@ -805,7 +830,7 @@ def test_delete_original_file(self):
self.assertNotExists(self.art_file)
def test_do_not_delete_original_if_already_in_place(self):
- artdest = os.path.join(os.path.dirname(self.i.path), b'cover.jpg')
+ artdest = os.path.join(os.path.dirname(self.i.path), b"cover.jpg")
shutil.copyfile(syspath(self.art_file), syspath(artdest))
self.afa_response = fetchart.Candidate(logger, path=artdest)
self._fetch_art(True)
@@ -819,19 +844,20 @@ def test_fetch_art_if_imported_file_deleted(self):
# message " has album art".
self._fetch_art(True)
util.remove(self.album.artpath)
- self.plugin.batch_fetch_art(self.lib, self.lib.albums(), force=False,
- quiet=False)
+ self.plugin.batch_fetch_art(
+ self.lib, self.lib.albums(), force=False, quiet=False
+ )
self.assertExists(self.album.artpath)
class ArtForAlbumTest(UseThePlugin):
- """ Tests that fetchart.art_for_album respects the scale & filesize
+ """Tests that fetchart.art_for_album respects the scale & filesize
configurations (e.g., minwidth, enforce_ratio, max_filesize)
"""
- IMG_225x225 = os.path.join(_common.RSRC, b'abbey.jpg')
- IMG_348x348 = os.path.join(_common.RSRC, b'abbey-different.jpg')
- IMG_500x490 = os.path.join(_common.RSRC, b'abbey-similar.jpg')
+ IMG_225x225 = os.path.join(_common.RSRC, b"abbey.jpg")
+ IMG_348x348 = os.path.join(_common.RSRC, b"abbey-different.jpg")
+ IMG_500x490 = os.path.join(_common.RSRC, b"abbey-similar.jpg")
IMG_225x225_SIZE = os.stat(util.syspath(IMG_225x225)).st_size
IMG_348x348_SIZE = os.stat(util.syspath(IMG_348x348)).st_size
@@ -857,7 +883,7 @@ def _assertImageIsValidArt(self, image_file, should_exist): # noqa
self.assertExists(image_file)
self.image_file = image_file
- candidate = self.plugin.art_for_album(self.album, [''], True)
+ candidate = self.plugin.art_for_album(self.album, [""], True)
if should_exist:
self.assertNotEqual(candidate, None)
@@ -868,8 +894,8 @@ def _assertImageIsValidArt(self, image_file, should_exist): # noqa
def _assertImageResized(self, image_file, should_resize): # noqa
self.image_file = image_file
- with patch.object(ArtResizer.shared, 'resize') as mock_resize:
- self.plugin.art_for_album(self.album, [''], True)
+ with patch.object(ArtResizer.shared, "resize") as mock_resize:
+ self.plugin.art_for_album(self.album, [""], True)
self.assertEqual(mock_resize.called, should_resize)
def _require_backend(self):
@@ -960,7 +986,7 @@ class DeprecatedConfigTest(_common.TestCase):
# plugin object
def setUp(self):
super().setUp()
- config['fetchart']['remote_priority'] = True
+ config["fetchart"]["remote_priority"] = True
self.plugin = fetchart.FetchArtPlugin()
def test_moves_filesystem_to_end(self):
@@ -973,26 +999,26 @@ class EnforceRatioConfigTest(_common.TestCase):
def _load_with_config(self, values, should_raise):
if should_raise:
for v in values:
- config['fetchart']['enforce_ratio'] = v
+ config["fetchart"]["enforce_ratio"] = v
with self.assertRaises(confuse.ConfigValueError):
fetchart.FetchArtPlugin()
else:
for v in values:
- config['fetchart']['enforce_ratio'] = v
+ config["fetchart"]["enforce_ratio"] = v
fetchart.FetchArtPlugin()
def test_px(self):
- self._load_with_config('0px 4px 12px 123px'.split(), False)
- self._load_with_config('00px stuff5px'.split(), True)
+ self._load_with_config("0px 4px 12px 123px".split(), False)
+ self._load_with_config("00px stuff5px".split(), True)
def test_percent(self):
- self._load_with_config('0% 0.00% 5.1% 5% 100%'.split(), False)
- self._load_with_config('00% 1.234% foo5% 100.1%'.split(), True)
+ self._load_with_config("0% 0.00% 5.1% 5% 100%".split(), False)
+ self._load_with_config("00% 1.234% foo5% 100.1%".split(), True)
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/plugins/test_bareasc.py b/test/plugins/test_bareasc.py
index f8f24c8b62..c2357d5e76 100644
--- a/test/plugins/test_bareasc.py
+++ b/test/plugins/test_bareasc.py
@@ -5,8 +5,7 @@
import unittest
-
-from test.helper import capture_stdout, TestHelper
+from test.helper import TestHelper, capture_stdout
from beets import logging
@@ -17,56 +16,49 @@ class BareascPluginTest(unittest.TestCase, TestHelper):
def setUp(self):
"""Set up test environment for bare ASCII query matching."""
self.setup_beets()
- self.log = logging.getLogger('beets.web')
- self.config['bareasc']['prefix'] = '#'
- self.load_plugins('bareasc')
+ self.log = logging.getLogger("beets.web")
+ self.config["bareasc"]["prefix"] = "#"
+ self.load_plugins("bareasc")
# Add library elements. Note that self.lib.add overrides any "id="
# and assigns the next free id number.
- self.add_item(title='with accents',
- album_id=2,
- artist='Antonín Dvořák')
- self.add_item(title='without accents',
- artist='Antonín Dvorak')
- self.add_item(title='with umlaut',
- album_id=2,
- artist='Brüggen')
- self.add_item(title='without umlaut or e',
- artist='Bruggen')
- self.add_item(title='without umlaut with e',
- artist='Brueggen')
+ self.add_item(title="with accents", album_id=2, artist="Antonín Dvořák")
+ self.add_item(title="without accents", artist="Antonín Dvorak")
+ self.add_item(title="with umlaut", album_id=2, artist="Brüggen")
+ self.add_item(title="without umlaut or e", artist="Bruggen")
+ self.add_item(title="without umlaut with e", artist="Brueggen")
def test_search_normal_noaccent(self):
"""Normal search, no accents, not using bare-ASCII match.
Finds just the unaccented entry.
"""
- items = self.lib.items('dvorak')
+ items = self.lib.items("dvorak")
self.assertEqual(len(items), 1)
- self.assertEqual([items[0].title], ['without accents'])
+ self.assertEqual([items[0].title], ["without accents"])
def test_search_normal_accent(self):
"""Normal search, with accents, not using bare-ASCII match.
Finds just the accented entry.
"""
- items = self.lib.items('dvořák')
+ items = self.lib.items("dvořák")
self.assertEqual(len(items), 1)
- self.assertEqual([items[0].title], ['with accents'])
+ self.assertEqual([items[0].title], ["with accents"])
def test_search_bareasc_noaccent(self):
"""Bare-ASCII search, no accents.
Finds both entries.
"""
- items = self.lib.items('#dvorak')
+ items = self.lib.items("#dvorak")
self.assertEqual(len(items), 2)
self.assertEqual(
{items[0].title, items[1].title},
- {'without accents', 'with accents'}
+ {"without accents", "with accents"},
)
def test_search_bareasc_accent(self):
@@ -74,12 +66,12 @@ def test_search_bareasc_accent(self):
Finds both entries.
"""
- items = self.lib.items('#dvořák')
+ items = self.lib.items("#dvořák")
self.assertEqual(len(items), 2)
self.assertEqual(
{items[0].title, items[1].title},
- {'without accents', 'with accents'}
+ {"without accents", "with accents"},
)
def test_search_bareasc_wrong_accent(self):
@@ -87,12 +79,12 @@ def test_search_bareasc_wrong_accent(self):
Finds both entries.
"""
- items = self.lib.items('#dvořäk')
+ items = self.lib.items("#dvořäk")
self.assertEqual(len(items), 2)
self.assertEqual(
{items[0].title, items[1].title},
- {'without accents', 'with accents'}
+ {"without accents", "with accents"},
)
def test_search_bareasc_noumlaut(self):
@@ -103,12 +95,12 @@ def test_search_bareasc_noumlaut(self):
This is expected behaviour for this simple plugin.
"""
- items = self.lib.items('#Bruggen')
+ items = self.lib.items("#Bruggen")
self.assertEqual(len(items), 2)
self.assertEqual(
{items[0].title, items[1].title},
- {'without umlaut or e', 'with umlaut'}
+ {"without umlaut or e", "with umlaut"},
)
def test_search_bareasc_umlaut(self):
@@ -119,34 +111,35 @@ def test_search_bareasc_umlaut(self):
This is expected behaviour for this simple plugin.
"""
- items = self.lib.items('#Brüggen')
+ items = self.lib.items("#Brüggen")
self.assertEqual(len(items), 2)
self.assertEqual(
{items[0].title, items[1].title},
- {'without umlaut or e', 'with umlaut'}
+ {"without umlaut or e", "with umlaut"},
)
def test_bareasc_list_output(self):
"""Bare-ASCII version of list command - check output."""
with capture_stdout() as output:
- self.run_command('bareasc', 'with accents')
+ self.run_command("bareasc", "with accents")
- self.assertIn('Antonin Dvorak', output.getvalue())
+ self.assertIn("Antonin Dvorak", output.getvalue())
def test_bareasc_format_output(self):
"""Bare-ASCII version of list -f command - check output."""
with capture_stdout() as output:
- self.run_command('bareasc', 'with accents',
- '-f', '$artist:: $title')
+ self.run_command(
+ "bareasc", "with accents", "-f", "$artist:: $title"
+ )
- self.assertEqual('Antonin Dvorak:: with accents\n',
- output.getvalue())
+ self.assertEqual("Antonin Dvorak:: with accents\n", output.getvalue())
def suite():
"""loader."""
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/plugins/test_beatport.py b/test/plugins/test_beatport.py
index 6e75e58740..3e578e5524 100644
--- a/test/plugins/test_beatport.py
+++ b/test/plugins/test_beatport.py
@@ -16,12 +16,12 @@
"""
import unittest
+from datetime import timedelta
from test import _common
from test.helper import TestHelper
-from datetime import timedelta
-from beetsplug import beatport
from beets import library
+from beetsplug import beatport
class BeatportTest(_common.TestCase, TestHelper):
@@ -34,35 +34,34 @@ def _make_release_response(self):
those required for the tests on this class.
"""
results = {
- "id": 1742984,
- "type": "release",
- "name": "Charade",
- "slug": "charade",
- "releaseDate": "2016-04-11",
- "publishDate": "2016-04-11",
- "audioFormat": "",
- "category": "Release",
- "currentStatus": "General Content",
- "catalogNumber": "GR089",
- "description": "",
- "label": {
- "id": 24539,
- "name": "Gravitas Recordings",
- "type": "label",
- "slug": "gravitas-recordings"
- },
- "artists": [{
- "id": 326158,
- "name": "Supersillyus",
- "slug": "supersillyus",
- "type": "artist"
- }],
- "genres": [{
- "id": 9,
- "name": "Breaks",
- "slug": "breaks",
- "type": "genre"
- }],
+ "id": 1742984,
+ "type": "release",
+ "name": "Charade",
+ "slug": "charade",
+ "releaseDate": "2016-04-11",
+ "publishDate": "2016-04-11",
+ "audioFormat": "",
+ "category": "Release",
+ "currentStatus": "General Content",
+ "catalogNumber": "GR089",
+ "description": "",
+ "label": {
+ "id": 24539,
+ "name": "Gravitas Recordings",
+ "type": "label",
+ "slug": "gravitas-recordings",
+ },
+ "artists": [
+ {
+ "id": 326158,
+ "name": "Supersillyus",
+ "slug": "supersillyus",
+ "type": "artist",
+ }
+ ],
+ "genres": [
+ {"id": 9, "name": "Breaks", "slug": "breaks", "type": "genre"}
+ ],
}
return results
@@ -74,343 +73,386 @@ def _make_tracks_response(self):
The list of elements on the returned list is incomplete, including just
those required for the tests on this class.
"""
- results = [{
- "id": 7817567,
- "type": "track",
- "sku": "track-7817567",
- "name": "Mirage a Trois",
- "trackNumber": 1,
- "mixName": "Original Mix",
- "title": "Mirage a Trois (Original Mix)",
- "slug": "mirage-a-trois-original-mix",
- "releaseDate": "2016-04-11",
- "publishDate": "2016-04-11",
- "currentStatus": "General Content",
- "length": "7:05",
- "lengthMs": 425421,
- "bpm": 90,
- "key": {
- "standard": {
- "letter": "G",
- "sharp": False,
- "flat": False,
- "chord": "minor"
+ results = [
+ {
+ "id": 7817567,
+ "type": "track",
+ "sku": "track-7817567",
+ "name": "Mirage a Trois",
+ "trackNumber": 1,
+ "mixName": "Original Mix",
+ "title": "Mirage a Trois (Original Mix)",
+ "slug": "mirage-a-trois-original-mix",
+ "releaseDate": "2016-04-11",
+ "publishDate": "2016-04-11",
+ "currentStatus": "General Content",
+ "length": "7:05",
+ "lengthMs": 425421,
+ "bpm": 90,
+ "key": {
+ "standard": {
+ "letter": "G",
+ "sharp": False,
+ "flat": False,
+ "chord": "minor",
+ },
+ "shortName": "Gmin",
+ },
+ "artists": [
+ {
+ "id": 326158,
+ "name": "Supersillyus",
+ "slug": "supersillyus",
+ "type": "artist",
+ }
+ ],
+ "genres": [
+ {
+ "id": 9,
+ "name": "Breaks",
+ "slug": "breaks",
+ "type": "genre",
+ }
+ ],
+ "subGenres": [
+ {
+ "id": 209,
+ "name": "Glitch Hop",
+ "slug": "glitch-hop",
+ "type": "subgenre",
+ }
+ ],
+ "release": {
+ "id": 1742984,
+ "name": "Charade",
+ "type": "release",
+ "slug": "charade",
+ },
+ "label": {
+ "id": 24539,
+ "name": "Gravitas Recordings",
+ "type": "label",
+ "slug": "gravitas-recordings",
+ "status": True,
+ },
},
- "shortName": "Gmin"
- },
- "artists": [{
- "id": 326158,
- "name": "Supersillyus",
- "slug": "supersillyus",
- "type": "artist"
- }],
- "genres": [{
- "id": 9,
- "name": "Breaks",
- "slug": "breaks",
- "type": "genre"
- }],
- "subGenres": [{
- "id": 209,
- "name": "Glitch Hop",
- "slug": "glitch-hop",
- "type": "subgenre"
- }],
- "release": {
- "id": 1742984,
- "name": "Charade",
- "type": "release",
- "slug": "charade"
- },
- "label": {
- "id": 24539,
- "name": "Gravitas Recordings",
- "type": "label",
- "slug": "gravitas-recordings",
- "status": True
- }
- }, {
- "id": 7817568,
- "type": "track",
- "sku": "track-7817568",
- "name": "Aeon Bahamut",
- "trackNumber": 2,
- "mixName": "Original Mix",
- "title": "Aeon Bahamut (Original Mix)",
- "slug": "aeon-bahamut-original-mix",
- "releaseDate": "2016-04-11",
- "publishDate": "2016-04-11",
- "currentStatus": "General Content",
- "length": "7:38",
- "lengthMs": 458000,
- "bpm": 100,
- "key": {
- "standard": {
- "letter": "G",
- "sharp": False,
- "flat": False,
- "chord": "major"
+ {
+ "id": 7817568,
+ "type": "track",
+ "sku": "track-7817568",
+ "name": "Aeon Bahamut",
+ "trackNumber": 2,
+ "mixName": "Original Mix",
+ "title": "Aeon Bahamut (Original Mix)",
+ "slug": "aeon-bahamut-original-mix",
+ "releaseDate": "2016-04-11",
+ "publishDate": "2016-04-11",
+ "currentStatus": "General Content",
+ "length": "7:38",
+ "lengthMs": 458000,
+ "bpm": 100,
+ "key": {
+ "standard": {
+ "letter": "G",
+ "sharp": False,
+ "flat": False,
+ "chord": "major",
+ },
+ "shortName": "Gmaj",
+ },
+ "artists": [
+ {
+ "id": 326158,
+ "name": "Supersillyus",
+ "slug": "supersillyus",
+ "type": "artist",
+ }
+ ],
+ "genres": [
+ {
+ "id": 9,
+ "name": "Breaks",
+ "slug": "breaks",
+ "type": "genre",
+ }
+ ],
+ "subGenres": [
+ {
+ "id": 209,
+ "name": "Glitch Hop",
+ "slug": "glitch-hop",
+ "type": "subgenre",
+ }
+ ],
+ "release": {
+ "id": 1742984,
+ "name": "Charade",
+ "type": "release",
+ "slug": "charade",
+ },
+ "label": {
+ "id": 24539,
+ "name": "Gravitas Recordings",
+ "type": "label",
+ "slug": "gravitas-recordings",
+ "status": True,
+ },
},
- "shortName": "Gmaj"
- },
- "artists": [{
- "id": 326158,
- "name": "Supersillyus",
- "slug": "supersillyus",
- "type": "artist"
- }],
- "genres": [{
- "id": 9,
- "name": "Breaks",
- "slug": "breaks",
- "type": "genre"
- }],
- "subGenres": [{
- "id": 209,
- "name": "Glitch Hop",
- "slug": "glitch-hop",
- "type": "subgenre"
- }],
- "release": {
- "id": 1742984,
- "name": "Charade",
- "type": "release",
- "slug": "charade"
- },
- "label": {
- "id": 24539,
- "name": "Gravitas Recordings",
- "type": "label",
- "slug": "gravitas-recordings",
- "status": True
- }
- }, {
- "id": 7817569,
- "type": "track",
- "sku": "track-7817569",
- "name": "Trancendental Medication",
- "trackNumber": 3,
- "mixName": "Original Mix",
- "title": "Trancendental Medication (Original Mix)",
- "slug": "trancendental-medication-original-mix",
- "releaseDate": "2016-04-11",
- "publishDate": "2016-04-11",
- "currentStatus": "General Content",
- "length": "1:08",
- "lengthMs": 68571,
- "bpm": 141,
- "key": {
- "standard": {
- "letter": "F",
- "sharp": False,
- "flat": False,
- "chord": "major"
+ {
+ "id": 7817569,
+ "type": "track",
+ "sku": "track-7817569",
+ "name": "Trancendental Medication",
+ "trackNumber": 3,
+ "mixName": "Original Mix",
+ "title": "Trancendental Medication (Original Mix)",
+ "slug": "trancendental-medication-original-mix",
+ "releaseDate": "2016-04-11",
+ "publishDate": "2016-04-11",
+ "currentStatus": "General Content",
+ "length": "1:08",
+ "lengthMs": 68571,
+ "bpm": 141,
+ "key": {
+ "standard": {
+ "letter": "F",
+ "sharp": False,
+ "flat": False,
+ "chord": "major",
+ },
+ "shortName": "Fmaj",
+ },
+ "artists": [
+ {
+ "id": 326158,
+ "name": "Supersillyus",
+ "slug": "supersillyus",
+ "type": "artist",
+ }
+ ],
+ "genres": [
+ {
+ "id": 9,
+ "name": "Breaks",
+ "slug": "breaks",
+ "type": "genre",
+ }
+ ],
+ "subGenres": [
+ {
+ "id": 209,
+ "name": "Glitch Hop",
+ "slug": "glitch-hop",
+ "type": "subgenre",
+ }
+ ],
+ "release": {
+ "id": 1742984,
+ "name": "Charade",
+ "type": "release",
+ "slug": "charade",
+ },
+ "label": {
+ "id": 24539,
+ "name": "Gravitas Recordings",
+ "type": "label",
+ "slug": "gravitas-recordings",
+ "status": True,
+ },
},
- "shortName": "Fmaj"
- },
- "artists": [{
- "id": 326158,
- "name": "Supersillyus",
- "slug": "supersillyus",
- "type": "artist"
- }],
- "genres": [{
- "id": 9,
- "name": "Breaks",
- "slug": "breaks",
- "type": "genre"
- }],
- "subGenres": [{
- "id": 209,
- "name": "Glitch Hop",
- "slug": "glitch-hop",
- "type": "subgenre"
- }],
- "release": {
- "id": 1742984,
- "name": "Charade",
- "type": "release",
- "slug": "charade"
- },
- "label": {
- "id": 24539,
- "name": "Gravitas Recordings",
- "type": "label",
- "slug": "gravitas-recordings",
- "status": True
- }
- }, {
- "id": 7817570,
- "type": "track",
- "sku": "track-7817570",
- "name": "A List of Instructions for When I'm Human",
- "trackNumber": 4,
- "mixName": "Original Mix",
- "title": "A List of Instructions for When I'm Human (Original Mix)",
- "slug": "a-list-of-instructions-for-when-im-human-original-mix",
- "releaseDate": "2016-04-11",
- "publishDate": "2016-04-11",
- "currentStatus": "General Content",
- "length": "6:57",
- "lengthMs": 417913,
- "bpm": 88,
- "key": {
- "standard": {
- "letter": "A",
- "sharp": False,
- "flat": False,
- "chord": "minor"
+ {
+ "id": 7817570,
+ "type": "track",
+ "sku": "track-7817570",
+ "name": "A List of Instructions for When I'm Human",
+ "trackNumber": 4,
+ "mixName": "Original Mix",
+ "title": "A List of Instructions for When I'm Human (Original Mix)",
+ "slug": "a-list-of-instructions-for-when-im-human-original-mix",
+ "releaseDate": "2016-04-11",
+ "publishDate": "2016-04-11",
+ "currentStatus": "General Content",
+ "length": "6:57",
+ "lengthMs": 417913,
+ "bpm": 88,
+ "key": {
+ "standard": {
+ "letter": "A",
+ "sharp": False,
+ "flat": False,
+ "chord": "minor",
+ },
+ "shortName": "Amin",
+ },
+ "artists": [
+ {
+ "id": 326158,
+ "name": "Supersillyus",
+ "slug": "supersillyus",
+ "type": "artist",
+ }
+ ],
+ "genres": [
+ {
+ "id": 9,
+ "name": "Breaks",
+ "slug": "breaks",
+ "type": "genre",
+ }
+ ],
+ "subGenres": [
+ {
+ "id": 209,
+ "name": "Glitch Hop",
+ "slug": "glitch-hop",
+ "type": "subgenre",
+ }
+ ],
+ "release": {
+ "id": 1742984,
+ "name": "Charade",
+ "type": "release",
+ "slug": "charade",
+ },
+ "label": {
+ "id": 24539,
+ "name": "Gravitas Recordings",
+ "type": "label",
+ "slug": "gravitas-recordings",
+ "status": True,
+ },
},
- "shortName": "Amin"
- },
- "artists": [{
- "id": 326158,
- "name": "Supersillyus",
- "slug": "supersillyus",
- "type": "artist"
- }],
- "genres": [{
- "id": 9,
- "name": "Breaks",
- "slug": "breaks",
- "type": "genre"
- }],
- "subGenres": [{
- "id": 209,
- "name": "Glitch Hop",
- "slug": "glitch-hop",
- "type": "subgenre"
- }],
- "release": {
- "id": 1742984,
- "name": "Charade",
- "type": "release",
- "slug": "charade"
- },
- "label": {
- "id": 24539,
- "name": "Gravitas Recordings",
- "type": "label",
- "slug": "gravitas-recordings",
- "status": True
- }
- }, {
- "id": 7817571,
- "type": "track",
- "sku": "track-7817571",
- "name": "The Great Shenanigan",
- "trackNumber": 5,
- "mixName": "Original Mix",
- "title": "The Great Shenanigan (Original Mix)",
- "slug": "the-great-shenanigan-original-mix",
- "releaseDate": "2016-04-11",
- "publishDate": "2016-04-11",
- "currentStatus": "General Content",
- "length": "9:49",
- "lengthMs": 589875,
- "bpm": 123,
- "key": {
- "standard": {
- "letter": "E",
- "sharp": False,
- "flat": True,
- "chord": "major"
+ {
+ "id": 7817571,
+ "type": "track",
+ "sku": "track-7817571",
+ "name": "The Great Shenanigan",
+ "trackNumber": 5,
+ "mixName": "Original Mix",
+ "title": "The Great Shenanigan (Original Mix)",
+ "slug": "the-great-shenanigan-original-mix",
+ "releaseDate": "2016-04-11",
+ "publishDate": "2016-04-11",
+ "currentStatus": "General Content",
+ "length": "9:49",
+ "lengthMs": 589875,
+ "bpm": 123,
+ "key": {
+ "standard": {
+ "letter": "E",
+ "sharp": False,
+ "flat": True,
+ "chord": "major",
+ },
+ "shortName": "E♭maj",
+ },
+ "artists": [
+ {
+ "id": 326158,
+ "name": "Supersillyus",
+ "slug": "supersillyus",
+ "type": "artist",
+ }
+ ],
+ "genres": [
+ {
+ "id": 9,
+ "name": "Breaks",
+ "slug": "breaks",
+ "type": "genre",
+ }
+ ],
+ "subGenres": [
+ {
+ "id": 209,
+ "name": "Glitch Hop",
+ "slug": "glitch-hop",
+ "type": "subgenre",
+ }
+ ],
+ "release": {
+ "id": 1742984,
+ "name": "Charade",
+ "type": "release",
+ "slug": "charade",
+ },
+ "label": {
+ "id": 24539,
+ "name": "Gravitas Recordings",
+ "type": "label",
+ "slug": "gravitas-recordings",
+ "status": True,
+ },
},
- "shortName": "E♭maj"
- },
- "artists": [{
- "id": 326158,
- "name": "Supersillyus",
- "slug": "supersillyus",
- "type": "artist"
- }],
- "genres": [{
- "id": 9,
- "name": "Breaks",
- "slug": "breaks",
- "type": "genre"
- }],
- "subGenres": [{
- "id": 209,
- "name": "Glitch Hop",
- "slug": "glitch-hop",
- "type": "subgenre"
- }],
- "release": {
- "id": 1742984,
- "name": "Charade",
- "type": "release",
- "slug": "charade"
- },
- "label": {
- "id": 24539,
- "name": "Gravitas Recordings",
- "type": "label",
- "slug": "gravitas-recordings",
- "status": True
- }
- }, {
- "id": 7817572,
- "type": "track",
- "sku": "track-7817572",
- "name": "Charade",
- "trackNumber": 6,
- "mixName": "Original Mix",
- "title": "Charade (Original Mix)",
- "slug": "charade-original-mix",
- "releaseDate": "2016-04-11",
- "publishDate": "2016-04-11",
- "currentStatus": "General Content",
- "length": "7:05",
- "lengthMs": 425423,
- "bpm": 123,
- "key": {
- "standard": {
- "letter": "A",
- "sharp": False,
- "flat": False,
- "chord": "major"
+ {
+ "id": 7817572,
+ "type": "track",
+ "sku": "track-7817572",
+ "name": "Charade",
+ "trackNumber": 6,
+ "mixName": "Original Mix",
+ "title": "Charade (Original Mix)",
+ "slug": "charade-original-mix",
+ "releaseDate": "2016-04-11",
+ "publishDate": "2016-04-11",
+ "currentStatus": "General Content",
+ "length": "7:05",
+ "lengthMs": 425423,
+ "bpm": 123,
+ "key": {
+ "standard": {
+ "letter": "A",
+ "sharp": False,
+ "flat": False,
+ "chord": "major",
+ },
+ "shortName": "Amaj",
+ },
+ "artists": [
+ {
+ "id": 326158,
+ "name": "Supersillyus",
+ "slug": "supersillyus",
+ "type": "artist",
+ }
+ ],
+ "genres": [
+ {
+ "id": 9,
+ "name": "Breaks",
+ "slug": "breaks",
+ "type": "genre",
+ }
+ ],
+ "subGenres": [
+ {
+ "id": 209,
+ "name": "Glitch Hop",
+ "slug": "glitch-hop",
+ "type": "subgenre",
+ }
+ ],
+ "release": {
+ "id": 1742984,
+ "name": "Charade",
+ "type": "release",
+ "slug": "charade",
+ },
+ "label": {
+ "id": 24539,
+ "name": "Gravitas Recordings",
+ "type": "label",
+ "slug": "gravitas-recordings",
+ "status": True,
+ },
},
- "shortName": "Amaj"
- },
- "artists": [{
- "id": 326158,
- "name": "Supersillyus",
- "slug": "supersillyus",
- "type": "artist"
- }],
- "genres": [{
- "id": 9,
- "name": "Breaks",
- "slug": "breaks",
- "type": "genre"
- }],
- "subGenres": [{
- "id": 209,
- "name": "Glitch Hop",
- "slug": "glitch-hop",
- "type": "subgenre"
- }],
- "release": {
- "id": 1742984,
- "name": "Charade",
- "type": "release",
- "slug": "charade"
- },
- "label": {
- "id": 24539,
- "name": "Gravitas Recordings",
- "type": "label",
- "slug": "gravitas-recordings",
- "status": True
- }
- }]
+ ]
return results
def setUp(self):
self.setup_beets()
- self.load_plugins('beatport')
- self.lib = library.Library(':memory:')
+ self.load_plugins("beatport")
+ self.lib = library.Library(":memory:")
# Set up 'album'.
response_release = self._make_release_response()
@@ -433,25 +475,25 @@ def tearDown(self):
def mk_test_album(self):
items = [_common.item() for _ in range(6)]
for item in items:
- item.album = 'Charade'
- item.catalognum = 'GR089'
- item.label = 'Gravitas Recordings'
- item.artist = 'Supersillyus'
+ item.album = "Charade"
+ item.catalognum = "GR089"
+ item.label = "Gravitas Recordings"
+ item.artist = "Supersillyus"
item.year = 2016
item.comp = False
- item.label_name = 'Gravitas Recordings'
- item.genre = 'Glitch Hop'
+ item.label_name = "Gravitas Recordings"
+ item.genre = "Glitch Hop"
item.year = 2016
item.month = 4
item.day = 11
- item.mix_name = 'Original Mix'
+ item.mix_name = "Original Mix"
- items[0].title = 'Mirage a Trois'
- items[1].title = 'Aeon Bahamut'
- items[2].title = 'Trancendental Medication'
- items[3].title = 'A List of Instructions for When I\'m Human'
- items[4].title = 'The Great Shenanigan'
- items[5].title = 'Charade'
+ items[0].title = "Mirage a Trois"
+ items[1].title = "Aeon Bahamut"
+ items[2].title = "Trancendental Medication"
+ items[3].title = "A List of Instructions for When I'm Human"
+ items[4].title = "The Great Shenanigan"
+ items[5].title = "Charade"
items[0].length = timedelta(minutes=7, seconds=5).total_seconds()
items[1].length = timedelta(minutes=7, seconds=38).total_seconds()
@@ -460,12 +502,12 @@ def mk_test_album(self):
items[4].length = timedelta(minutes=9, seconds=49).total_seconds()
items[5].length = timedelta(minutes=7, seconds=5).total_seconds()
- items[0].url = 'mirage-a-trois-original-mix'
- items[1].url = 'aeon-bahamut-original-mix'
- items[2].url = 'trancendental-medication-original-mix'
- items[3].url = 'a-list-of-instructions-for-when-im-human-original-mix'
- items[4].url = 'the-great-shenanigan-original-mix'
- items[5].url = 'charade-original-mix'
+ items[0].url = "mirage-a-trois-original-mix"
+ items[1].url = "aeon-bahamut-original-mix"
+ items[2].url = "trancendental-medication-original-mix"
+ items[3].url = "a-list-of-instructions-for-when-im-human-original-mix"
+ items[4].url = "the-great-shenanigan-original-mix"
+ items[5].url = "charade-original-mix"
counter = 0
for item in items:
@@ -479,12 +521,12 @@ def mk_test_album(self):
items[4].bpm = 123
items[5].bpm = 123
- items[0].initial_key = 'Gmin'
- items[1].initial_key = 'Gmaj'
- items[2].initial_key = 'Fmaj'
- items[3].initial_key = 'Amin'
- items[4].initial_key = 'E♭maj'
- items[5].initial_key = 'Amaj'
+ items[0].initial_key = "Gmin"
+ items[1].initial_key = "Gmaj"
+ items[2].initial_key = "Fmaj"
+ items[3].initial_key = "Amin"
+ items[4].initial_key = "E♭maj"
+ items[5].initial_key = "Amaj"
for item in items:
self.lib.add(item)
@@ -496,21 +538,23 @@ def mk_test_album(self):
# Test BeatportRelease.
def test_album_name_applied(self):
- self.assertEqual(self.album.name, self.test_album['album'])
+ self.assertEqual(self.album.name, self.test_album["album"])
def test_catalog_number_applied(self):
- self.assertEqual(self.album.catalog_number,
- self.test_album['catalognum'])
+ self.assertEqual(
+ self.album.catalog_number, self.test_album["catalognum"]
+ )
def test_label_applied(self):
- self.assertEqual(self.album.label_name, self.test_album['label'])
+ self.assertEqual(self.album.label_name, self.test_album["label"])
def test_category_applied(self):
- self.assertEqual(self.album.category, 'Release')
+ self.assertEqual(self.album.category, "Release")
def test_album_url_applied(self):
- self.assertEqual(self.album.url,
- 'https://beatport.com/release/charade/1742984')
+ self.assertEqual(
+ self.album.url, "https://beatport.com/release/charade/1742984"
+ )
# Test BeatportTrack.
def test_title_applied(self):
@@ -523,8 +567,9 @@ def test_mix_name_applied(self):
def test_length_applied(self):
for track, test_track in zip(self.tracks, self.test_tracks):
- self.assertEqual(int(track.length.total_seconds()),
- int(test_track.length))
+ self.assertEqual(
+ int(track.length.total_seconds()), int(test_track.length)
+ )
def test_track_url_applied(self):
# Specify beatport ids here because an 'item.id' is beets-internal.
@@ -539,8 +584,9 @@ def test_track_url_applied(self):
# Concatenate with 'id' to pass strict equality test.
for track, test_track, id in zip(self.tracks, self.test_tracks, ids):
self.assertEqual(
- track.url, 'https://beatport.com/track/' +
- test_track.url + '/' + str(id))
+ track.url,
+ "https://beatport.com/track/" + test_track.url + "/" + str(id),
+ )
def test_bpm_applied(self):
for track, test_track in zip(self.tracks, self.test_tracks):
@@ -557,28 +603,34 @@ def test_genre_applied(self):
class BeatportResponseEmptyTest(_common.TestCase, TestHelper):
def _make_tracks_response(self):
- results = [{
- "id": 7817567,
- "name": "Mirage a Trois",
- "genres": [{
- "id": 9,
- "name": "Breaks",
- "slug": "breaks",
- "type": "genre"
- }],
- "subGenres": [{
- "id": 209,
- "name": "Glitch Hop",
- "slug": "glitch-hop",
- "type": "subgenre"
- }],
- }]
+ results = [
+ {
+ "id": 7817567,
+ "name": "Mirage a Trois",
+ "genres": [
+ {
+ "id": 9,
+ "name": "Breaks",
+ "slug": "breaks",
+ "type": "genre",
+ }
+ ],
+ "subGenres": [
+ {
+ "id": 209,
+ "name": "Glitch Hop",
+ "slug": "glitch-hop",
+ "type": "subgenre",
+ }
+ ],
+ }
+ ]
return results
def setUp(self):
self.setup_beets()
- self.load_plugins('beatport')
- self.lib = library.Library(':memory:')
+ self.load_plugins("beatport")
+ self.lib = library.Library(":memory:")
# Set up 'tracks'.
self.response_tracks = self._make_tracks_response()
@@ -597,31 +649,31 @@ def test_response_tracks_empty(self):
self.assertEqual(tracks, [])
def test_sub_genre_empty_fallback(self):
- """No 'sub_genre' is provided. Test if fallback to 'genre' works.
- """
- self.response_tracks[0]['subGenres'] = []
+ """No 'sub_genre' is provided. Test if fallback to 'genre' works."""
+ self.response_tracks[0]["subGenres"] = []
tracks = [beatport.BeatportTrack(t) for t in self.response_tracks]
- self.test_tracks[0]['subGenres'] = []
+ self.test_tracks[0]["subGenres"] = []
- self.assertEqual(tracks[0].genre,
- self.test_tracks[0]['genres'][0]['name'])
+ self.assertEqual(
+ tracks[0].genre, self.test_tracks[0]["genres"][0]["name"]
+ )
def test_genre_empty(self):
- """No 'genre' is provided. Test if 'sub_genre' is applied.
- """
- self.response_tracks[0]['genres'] = []
+ """No 'genre' is provided. Test if 'sub_genre' is applied."""
+ self.response_tracks[0]["genres"] = []
tracks = [beatport.BeatportTrack(t) for t in self.response_tracks]
- self.test_tracks[0]['genres'] = []
+ self.test_tracks[0]["genres"] = []
- self.assertEqual(tracks[0].genre,
- self.test_tracks[0]['subGenres'][0]['name'])
+ self.assertEqual(
+ tracks[0].genre, self.test_tracks[0]["subGenres"][0]["name"]
+ )
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/plugins/test_bucket.py b/test/plugins/test_bucket.py
index 46091e2424..b2893ca780 100644
--- a/test/plugins/test_bucket.py
+++ b/test/plugins/test_bucket.py
@@ -16,11 +16,11 @@
import unittest
-from beetsplug import bucket
-from beets import config, ui
-
from test.helper import TestHelper
+from beets import config, ui
+from beetsplug import bucket
+
class BucketPluginTest(unittest.TestCase, TestHelper):
def setUp(self):
@@ -30,127 +30,138 @@ def setUp(self):
def tearDown(self):
self.teardown_beets()
- def _setup_config(self, bucket_year=[], bucket_alpha=[],
- bucket_alpha_regex={}, extrapolate=False):
- config['bucket']['bucket_year'] = bucket_year
- config['bucket']['bucket_alpha'] = bucket_alpha
- config['bucket']['bucket_alpha_regex'] = bucket_alpha_regex
- config['bucket']['extrapolate'] = extrapolate
+ def _setup_config(
+ self,
+ bucket_year=[],
+ bucket_alpha=[],
+ bucket_alpha_regex={},
+ extrapolate=False,
+ ):
+ config["bucket"]["bucket_year"] = bucket_year
+ config["bucket"]["bucket_alpha"] = bucket_alpha
+ config["bucket"]["bucket_alpha_regex"] = bucket_alpha_regex
+ config["bucket"]["extrapolate"] = extrapolate
self.plugin.setup()
def test_year_single_year(self):
"""If a single year is given, range starts from this year and stops at
the year preceding the one of next bucket."""
- self._setup_config(bucket_year=['1950s', '1970s'])
- self.assertEqual(self.plugin._tmpl_bucket('1959'), '1950s')
- self.assertEqual(self.plugin._tmpl_bucket('1969'), '1950s')
+ self._setup_config(bucket_year=["1950s", "1970s"])
+ self.assertEqual(self.plugin._tmpl_bucket("1959"), "1950s")
+ self.assertEqual(self.plugin._tmpl_bucket("1969"), "1950s")
def test_year_single_year_last_folder(self):
"""If a single year is given for the last bucket, extend it to current
year."""
- self._setup_config(bucket_year=['1950', '1970'])
- self.assertEqual(self.plugin._tmpl_bucket('2014'), '1970')
- self.assertEqual(self.plugin._tmpl_bucket('2025'), '2025')
+ self._setup_config(bucket_year=["1950", "1970"])
+ self.assertEqual(self.plugin._tmpl_bucket("2014"), "1970")
+ self.assertEqual(self.plugin._tmpl_bucket("2025"), "2025")
def test_year_two_years(self):
"""Buckets can be named with the 'from-to' syntax."""
- self._setup_config(bucket_year=['1950-59', '1960-1969'])
- self.assertEqual(self.plugin._tmpl_bucket('1959'), '1950-59')
- self.assertEqual(self.plugin._tmpl_bucket('1969'), '1960-1969')
+ self._setup_config(bucket_year=["1950-59", "1960-1969"])
+ self.assertEqual(self.plugin._tmpl_bucket("1959"), "1950-59")
+ self.assertEqual(self.plugin._tmpl_bucket("1969"), "1960-1969")
def test_year_multiple_years(self):
"""Buckets can be named by listing all the years"""
- self._setup_config(bucket_year=['1950,51,52,53'])
- self.assertEqual(self.plugin._tmpl_bucket('1953'), '1950,51,52,53')
- self.assertEqual(self.plugin._tmpl_bucket('1974'), '1974')
+ self._setup_config(bucket_year=["1950,51,52,53"])
+ self.assertEqual(self.plugin._tmpl_bucket("1953"), "1950,51,52,53")
+ self.assertEqual(self.plugin._tmpl_bucket("1974"), "1974")
def test_year_out_of_range(self):
"""If no range match, return the year"""
- self._setup_config(bucket_year=['1950-59', '1960-69'])
- self.assertEqual(self.plugin._tmpl_bucket('1974'), '1974')
+ self._setup_config(bucket_year=["1950-59", "1960-69"])
+ self.assertEqual(self.plugin._tmpl_bucket("1974"), "1974")
self._setup_config(bucket_year=[])
- self.assertEqual(self.plugin._tmpl_bucket('1974'), '1974')
+ self.assertEqual(self.plugin._tmpl_bucket("1974"), "1974")
def test_year_out_of_range_extrapolate(self):
"""If no defined range match, extrapolate all ranges using the most
common syntax amongst existing buckets and return the matching one."""
- self._setup_config(bucket_year=['1950-59', '1960-69'],
- extrapolate=True)
- self.assertEqual(self.plugin._tmpl_bucket('1914'), '1910-19')
+ self._setup_config(bucket_year=["1950-59", "1960-69"], extrapolate=True)
+ self.assertEqual(self.plugin._tmpl_bucket("1914"), "1910-19")
# pick single year format
- self._setup_config(bucket_year=['1962-81', '2002', '2012'],
- extrapolate=True)
- self.assertEqual(self.plugin._tmpl_bucket('1983'), '1982')
+ self._setup_config(
+ bucket_year=["1962-81", "2002", "2012"], extrapolate=True
+ )
+ self.assertEqual(self.plugin._tmpl_bucket("1983"), "1982")
# pick from-end format
- self._setup_config(bucket_year=['1962-81', '2002', '2012-14'],
- extrapolate=True)
- self.assertEqual(self.plugin._tmpl_bucket('1983'), '1982-01')
+ self._setup_config(
+ bucket_year=["1962-81", "2002", "2012-14"], extrapolate=True
+ )
+ self.assertEqual(self.plugin._tmpl_bucket("1983"), "1982-01")
# extrapolate add ranges, but never modifies existing ones
- self._setup_config(bucket_year=['1932', '1942', '1952', '1962-81',
- '2002'], extrapolate=True)
- self.assertEqual(self.plugin._tmpl_bucket('1975'), '1962-81')
+ self._setup_config(
+ bucket_year=["1932", "1942", "1952", "1962-81", "2002"],
+ extrapolate=True,
+ )
+ self.assertEqual(self.plugin._tmpl_bucket("1975"), "1962-81")
def test_alpha_all_chars(self):
"""Alphabet buckets can be named by listing all their chars"""
- self._setup_config(bucket_alpha=['ABCD', 'FGH', 'IJKL'])
- self.assertEqual(self.plugin._tmpl_bucket('garry'), 'FGH')
+ self._setup_config(bucket_alpha=["ABCD", "FGH", "IJKL"])
+ self.assertEqual(self.plugin._tmpl_bucket("garry"), "FGH")
def test_alpha_first_last_chars(self):
"""Alphabet buckets can be named by listing the 'from-to' syntax"""
- self._setup_config(bucket_alpha=['0->9', 'A->D', 'F-H', 'I->Z'])
- self.assertEqual(self.plugin._tmpl_bucket('garry'), 'F-H')
- self.assertEqual(self.plugin._tmpl_bucket('2pac'), '0->9')
+ self._setup_config(bucket_alpha=["0->9", "A->D", "F-H", "I->Z"])
+ self.assertEqual(self.plugin._tmpl_bucket("garry"), "F-H")
+ self.assertEqual(self.plugin._tmpl_bucket("2pac"), "0->9")
def test_alpha_out_of_range(self):
"""If no range match, return the initial"""
- self._setup_config(bucket_alpha=['ABCD', 'FGH', 'IJKL'])
- self.assertEqual(self.plugin._tmpl_bucket('errol'), 'E')
+ self._setup_config(bucket_alpha=["ABCD", "FGH", "IJKL"])
+ self.assertEqual(self.plugin._tmpl_bucket("errol"), "E")
self._setup_config(bucket_alpha=[])
- self.assertEqual(self.plugin._tmpl_bucket('errol'), 'E')
+ self.assertEqual(self.plugin._tmpl_bucket("errol"), "E")
def test_alpha_regex(self):
"""Check regex is used"""
- self._setup_config(bucket_alpha=['foo', 'bar'],
- bucket_alpha_regex={'foo': '^[a-d]',
- 'bar': '^[e-z]'})
- self.assertEqual(self.plugin._tmpl_bucket('alpha'), 'foo')
- self.assertEqual(self.plugin._tmpl_bucket('delta'), 'foo')
- self.assertEqual(self.plugin._tmpl_bucket('zeta'), 'bar')
- self.assertEqual(self.plugin._tmpl_bucket('Alpha'), 'A')
+ self._setup_config(
+ bucket_alpha=["foo", "bar"],
+ bucket_alpha_regex={"foo": "^[a-d]", "bar": "^[e-z]"},
+ )
+ self.assertEqual(self.plugin._tmpl_bucket("alpha"), "foo")
+ self.assertEqual(self.plugin._tmpl_bucket("delta"), "foo")
+ self.assertEqual(self.plugin._tmpl_bucket("zeta"), "bar")
+ self.assertEqual(self.plugin._tmpl_bucket("Alpha"), "A")
def test_alpha_regex_mix(self):
"""Check mixing regex and non-regex is possible"""
- self._setup_config(bucket_alpha=['A - D', 'E - L'],
- bucket_alpha_regex={'A - D': '^[0-9a-dA-D…äÄ]'})
- self.assertEqual(self.plugin._tmpl_bucket('alpha'), 'A - D')
- self.assertEqual(self.plugin._tmpl_bucket('Ärzte'), 'A - D')
- self.assertEqual(self.plugin._tmpl_bucket('112'), 'A - D')
- self.assertEqual(self.plugin._tmpl_bucket('…and Oceans'), 'A - D')
- self.assertEqual(self.plugin._tmpl_bucket('Eagles'), 'E - L')
+ self._setup_config(
+ bucket_alpha=["A - D", "E - L"],
+ bucket_alpha_regex={"A - D": "^[0-9a-dA-D…äÄ]"},
+ )
+ self.assertEqual(self.plugin._tmpl_bucket("alpha"), "A - D")
+ self.assertEqual(self.plugin._tmpl_bucket("Ärzte"), "A - D")
+ self.assertEqual(self.plugin._tmpl_bucket("112"), "A - D")
+ self.assertEqual(self.plugin._tmpl_bucket("…and Oceans"), "A - D")
+ self.assertEqual(self.plugin._tmpl_bucket("Eagles"), "E - L")
def test_bad_alpha_range_def(self):
"""If bad alpha range definition, a UserError is raised."""
with self.assertRaises(ui.UserError):
- self._setup_config(bucket_alpha=['$%'])
+ self._setup_config(bucket_alpha=["$%"])
def test_bad_year_range_def_no4digits(self):
"""If bad year range definition, a UserError is raised.
Range origin must be expressed on 4 digits.
"""
with self.assertRaises(ui.UserError):
- self._setup_config(bucket_year=['62-64'])
+ self._setup_config(bucket_year=["62-64"])
def test_bad_year_range_def_nodigits(self):
"""If bad year range definition, a UserError is raised.
At least the range origin must be declared.
"""
with self.assertRaises(ui.UserError):
- self._setup_config(bucket_year=['nodigits'])
+ self._setup_config(bucket_year=["nodigits"])
def check_span_from_str(self, sstr, dfrom, dto):
d = bucket.span_from_str(sstr)
- self.assertEqual(dfrom, d['from'])
- self.assertEqual(dto, d['to'])
+ self.assertEqual(dfrom, d["from"])
+ self.assertEqual(dto, d["to"])
def test_span_from_str(self):
self.check_span_from_str("1980 2000", 1980, 2000)
@@ -162,5 +173,6 @@ def test_span_from_str(self):
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/plugins/test_convert.py b/test/plugins/test_convert.py
index ae15a3cc57..124ca5e766 100644
--- a/test/plugins/test_convert.py
+++ b/test/plugins/test_convert.py
@@ -14,86 +14,90 @@
import fnmatch
-import sys
-import re
import os.path
+import re
+import sys
import unittest
-
-from test import _common
-from test import helper
-from test.helper import control_stdin, capture_log
+from test import _common, helper
+from test.helper import capture_log, control_stdin
from mediafile import MediaFile
+
from beets import util
from beets.util import bytestring_path, displayable_path
def shell_quote(text):
import shlex
+
return shlex.quote(text)
class TestHelper(helper.TestHelper):
-
def tagged_copy_cmd(self, tag):
"""Return a conversion command that copies files and appends
`tag` to the copy.
"""
- if re.search('[^a-zA-Z0-9]', tag):
- raise ValueError("tag '{}' must only contain letters and digits"
- .format(tag))
+ if re.search("[^a-zA-Z0-9]", tag):
+ raise ValueError(
+ "tag '{}' must only contain letters and digits".format(tag)
+ )
# A Python script that copies the file and appends a tag.
- stub = os.path.join(_common.RSRC, b'convert_stub.py').decode('utf-8')
- return "{} {} $source $dest {}".format(shell_quote(sys.executable),
- shell_quote(stub), tag)
+ stub = os.path.join(_common.RSRC, b"convert_stub.py").decode("utf-8")
+ return "{} {} $source $dest {}".format(
+ shell_quote(sys.executable), shell_quote(stub), tag
+ )
def assertFileTag(self, path, tag): # noqa
"""Assert that the path is a file and the files content ends
with `tag`.
"""
display_tag = tag
- tag = tag.encode('utf-8')
+ tag = tag.encode("utf-8")
self.assertIsFile(path)
- with open(path, 'rb') as f:
+ with open(path, "rb") as f:
f.seek(-len(display_tag), os.SEEK_END)
- self.assertEqual(f.read(), tag,
- '{} is not tagged with {}'
- .format(
- displayable_path(path),
- display_tag))
+ self.assertEqual(
+ f.read(),
+ tag,
+ "{} is not tagged with {}".format(
+ displayable_path(path), display_tag
+ ),
+ )
def assertNoFileTag(self, path, tag): # noqa
"""Assert that the path is a file and the files content does not
end with `tag`.
"""
display_tag = tag
- tag = tag.encode('utf-8')
+ tag = tag.encode("utf-8")
self.assertIsFile(path)
- with open(path, 'rb') as f:
+ with open(path, "rb") as f:
f.seek(-len(tag), os.SEEK_END)
- self.assertNotEqual(f.read(), tag,
- '{} is unexpectedly tagged with {}'
- .format(
- displayable_path(path),
- display_tag))
+ self.assertNotEqual(
+ f.read(),
+ tag,
+ "{} is unexpectedly tagged with {}".format(
+ displayable_path(path), display_tag
+ ),
+ )
@_common.slow_test()
class ImportConvertTest(_common.TestCase, TestHelper):
-
def setUp(self):
self.setup_beets(disk=True) # Converter is threaded
self.importer = self.create_importer()
- self.load_plugins('convert')
+ self.load_plugins("convert")
- self.config['convert'] = {
- 'dest': os.path.join(self.temp_dir, b'convert'),
- 'command': self.tagged_copy_cmd('convert'),
+ self.config["convert"] = {
+ "dest": os.path.join(self.temp_dir, b"convert"),
+ "command": self.tagged_copy_cmd("convert"),
# Enforce running convert
- 'max_bitrate': 1,
- 'auto': True,
- 'quiet': False,
+ "max_bitrate": 1,
+ "auto": True,
+ "quiet": False,
}
def tearDown(self):
@@ -103,13 +107,13 @@ def tearDown(self):
def test_import_converted(self):
self.importer.run()
item = self.lib.items().get()
- self.assertFileTag(item.path, 'convert')
+ self.assertFileTag(item.path, "convert")
# FIXME: fails on windows
- @unittest.skipIf(sys.platform == 'win32', 'win32')
+ @unittest.skipIf(sys.platform == "win32", "win32")
def test_import_original_on_convert_error(self):
# `false` exits with non-zero code
- self.config['convert']['command'] = 'false'
+ self.config["convert"]["command"] = "false"
self.importer.run()
item = self.lib.items().get()
@@ -117,13 +121,17 @@ def test_import_original_on_convert_error(self):
self.assertIsFile(item.path)
def test_delete_originals(self):
- self.config['convert']['delete_originals'] = True
+ self.config["convert"]["delete_originals"] = True
self.importer.run()
for path in self.importer.paths:
for root, dirnames, filenames in os.walk(path):
- self.assertEqual(len(fnmatch.filter(filenames, '*.mp3')), 0,
- 'Non-empty import directory {}'
- .format(util.displayable_path(path)))
+ self.assertEqual(
+ len(fnmatch.filter(filenames, "*.mp3")),
+ 0,
+ "Non-empty import directory {}".format(
+ util.displayable_path(path)
+ ),
+ )
def get_count_of_import_files(self):
import_file_count = 0
@@ -146,8 +154,8 @@ def run_convert_path(self, path, *args):
# an argument bytestring.
path = path.decode(util._fsencoding()).encode(util.arg_encoding())
- args = args + (b'path:' + path,)
- return self.run_command('convert', *args)
+ args = args + (b"path:" + path,)
+ return self.run_command("convert", *args)
def run_convert(self, *args):
"""Run the `convert` command on `self.item`."""
@@ -156,28 +164,27 @@ def run_convert(self, *args):
@_common.slow_test()
class ConvertCliTest(_common.TestCase, TestHelper, ConvertCommand):
-
def setUp(self):
self.setup_beets(disk=True) # Converter is threaded
- self.album = self.add_album_fixture(ext='ogg')
+ self.album = self.add_album_fixture(ext="ogg")
self.item = self.album.items()[0]
- self.load_plugins('convert')
+ self.load_plugins("convert")
self.convert_dest = bytestring_path(
- os.path.join(self.temp_dir, b'convert_dest')
+ os.path.join(self.temp_dir, b"convert_dest")
)
- self.config['convert'] = {
- 'dest': self.convert_dest,
- 'paths': {'default': 'converted'},
- 'format': 'mp3',
- 'formats': {
- 'mp3': self.tagged_copy_cmd('mp3'),
- 'ogg': self.tagged_copy_cmd('ogg'),
- 'opus': {
- 'command': self.tagged_copy_cmd('opus'),
- 'extension': 'ops',
- }
- }
+ self.config["convert"] = {
+ "dest": self.convert_dest,
+ "paths": {"default": "converted"},
+ "format": "mp3",
+ "formats": {
+ "mp3": self.tagged_copy_cmd("mp3"),
+ "ogg": self.tagged_copy_cmd("ogg"),
+ "opus": {
+ "command": self.tagged_copy_cmd("opus"),
+ "extension": "ops",
+ },
+ },
}
def tearDown(self):
@@ -185,142 +192,140 @@ def tearDown(self):
self.teardown_beets()
def test_convert(self):
- with control_stdin('y'):
+ with control_stdin("y"):
self.run_convert()
- converted = os.path.join(self.convert_dest, b'converted.mp3')
- self.assertFileTag(converted, 'mp3')
+ converted = os.path.join(self.convert_dest, b"converted.mp3")
+ self.assertFileTag(converted, "mp3")
def test_convert_with_auto_confirmation(self):
- self.run_convert('--yes')
- converted = os.path.join(self.convert_dest, b'converted.mp3')
- self.assertFileTag(converted, 'mp3')
+ self.run_convert("--yes")
+ converted = os.path.join(self.convert_dest, b"converted.mp3")
+ self.assertFileTag(converted, "mp3")
def test_reject_confirmation(self):
- with control_stdin('n'):
+ with control_stdin("n"):
self.run_convert()
- converted = os.path.join(self.convert_dest, b'converted.mp3')
+ converted = os.path.join(self.convert_dest, b"converted.mp3")
self.assertNotExists(converted)
def test_convert_keep_new(self):
- self.assertEqual(os.path.splitext(self.item.path)[1], b'.ogg')
+ self.assertEqual(os.path.splitext(self.item.path)[1], b".ogg")
- with control_stdin('y'):
- self.run_convert('--keep-new')
+ with control_stdin("y"):
+ self.run_convert("--keep-new")
self.item.load()
- self.assertEqual(os.path.splitext(self.item.path)[1], b'.mp3')
+ self.assertEqual(os.path.splitext(self.item.path)[1], b".mp3")
def test_format_option(self):
- with control_stdin('y'):
- self.run_convert('--format', 'opus')
- converted = os.path.join(self.convert_dest, b'converted.ops')
- self.assertFileTag(converted, 'opus')
+ with control_stdin("y"):
+ self.run_convert("--format", "opus")
+ converted = os.path.join(self.convert_dest, b"converted.ops")
+ self.assertFileTag(converted, "opus")
def test_embed_album_art(self):
- self.config['convert']['embed'] = True
- image_path = os.path.join(_common.RSRC, b'image-2x3.jpg')
+ self.config["convert"]["embed"] = True
+ image_path = os.path.join(_common.RSRC, b"image-2x3.jpg")
self.album.artpath = image_path
self.album.store()
- with open(os.path.join(image_path), 'rb') as f:
+ with open(os.path.join(image_path), "rb") as f:
image_data = f.read()
- with control_stdin('y'):
+ with control_stdin("y"):
self.run_convert()
- converted = os.path.join(self.convert_dest, b'converted.mp3')
+ converted = os.path.join(self.convert_dest, b"converted.mp3")
mediafile = MediaFile(converted)
self.assertEqual(mediafile.images[0].data, image_data)
def test_skip_existing(self):
- converted = os.path.join(self.convert_dest, b'converted.mp3')
- self.touch(converted, content='XXX')
- self.run_convert('--yes')
+ converted = os.path.join(self.convert_dest, b"converted.mp3")
+ self.touch(converted, content="XXX")
+ self.run_convert("--yes")
with open(converted) as f:
- self.assertEqual(f.read(), 'XXX')
+ self.assertEqual(f.read(), "XXX")
def test_pretend(self):
- self.run_convert('--pretend')
- converted = os.path.join(self.convert_dest, b'converted.mp3')
+ self.run_convert("--pretend")
+ converted = os.path.join(self.convert_dest, b"converted.mp3")
self.assertNotExists(converted)
def test_empty_query(self):
- with capture_log('beets.convert') as logs:
- self.run_convert('An impossible query')
- self.assertEqual(logs[0], 'convert: Empty query result.')
+ with capture_log("beets.convert") as logs:
+ self.run_convert("An impossible query")
+ self.assertEqual(logs[0], "convert: Empty query result.")
def test_no_transcode_when_maxbr_set_high_and_different_formats(self):
- self.config['convert']['max_bitrate'] = 5000
- with control_stdin('y'):
+ self.config["convert"]["max_bitrate"] = 5000
+ with control_stdin("y"):
self.run_convert()
- converted = os.path.join(self.convert_dest, b'converted.mp3')
- self.assertFileTag(converted, 'mp3')
+ converted = os.path.join(self.convert_dest, b"converted.mp3")
+ self.assertFileTag(converted, "mp3")
def test_transcode_when_maxbr_set_low_and_different_formats(self):
- self.config['convert']['max_bitrate'] = 5
- with control_stdin('y'):
+ self.config["convert"]["max_bitrate"] = 5
+ with control_stdin("y"):
self.run_convert()
- converted = os.path.join(self.convert_dest, b'converted.mp3')
- self.assertFileTag(converted, 'mp3')
+ converted = os.path.join(self.convert_dest, b"converted.mp3")
+ self.assertFileTag(converted, "mp3")
def test_transcode_when_maxbr_set_to_none_and_different_formats(self):
- with control_stdin('y'):
+ with control_stdin("y"):
self.run_convert()
- converted = os.path.join(self.convert_dest, b'converted.mp3')
- self.assertFileTag(converted, 'mp3')
+ converted = os.path.join(self.convert_dest, b"converted.mp3")
+ self.assertFileTag(converted, "mp3")
def test_no_transcode_when_maxbr_set_high_and_same_formats(self):
- self.config['convert']['max_bitrate'] = 5000
- self.config['convert']['format'] = 'ogg'
- with control_stdin('y'):
+ self.config["convert"]["max_bitrate"] = 5000
+ self.config["convert"]["format"] = "ogg"
+ with control_stdin("y"):
self.run_convert()
- converted = os.path.join(self.convert_dest, b'converted.ogg')
- self.assertNoFileTag(converted, 'ogg')
+ converted = os.path.join(self.convert_dest, b"converted.ogg")
+ self.assertNoFileTag(converted, "ogg")
def test_transcode_when_maxbr_set_low_and_same_formats(self):
- self.config['convert']['max_bitrate'] = 5
- self.config['convert']['format'] = 'ogg'
- with control_stdin('y'):
+ self.config["convert"]["max_bitrate"] = 5
+ self.config["convert"]["format"] = "ogg"
+ with control_stdin("y"):
self.run_convert()
- converted = os.path.join(self.convert_dest, b'converted.ogg')
- self.assertFileTag(converted, 'ogg')
+ converted = os.path.join(self.convert_dest, b"converted.ogg")
+ self.assertFileTag(converted, "ogg")
def test_transcode_when_maxbr_set_to_none_and_same_formats(self):
- self.config['convert']['format'] = 'ogg'
- with control_stdin('y'):
+ self.config["convert"]["format"] = "ogg"
+ with control_stdin("y"):
self.run_convert()
- converted = os.path.join(self.convert_dest, b'converted.ogg')
- self.assertNoFileTag(converted, 'ogg')
+ converted = os.path.join(self.convert_dest, b"converted.ogg")
+ self.assertNoFileTag(converted, "ogg")
def test_playlist(self):
- with control_stdin('y'):
- self.run_convert('--playlist', 'playlist.m3u8')
- m3u_created = os.path.join(self.convert_dest, b'playlist.m3u8')
+ with control_stdin("y"):
+ self.run_convert("--playlist", "playlist.m3u8")
+ m3u_created = os.path.join(self.convert_dest, b"playlist.m3u8")
self.assertTrue(os.path.exists(m3u_created))
def test_playlist_pretend(self):
- self.run_convert('--playlist', 'playlist.m3u8', '--pretend')
- m3u_created = os.path.join(self.convert_dest, b'playlist.m3u8')
+ self.run_convert("--playlist", "playlist.m3u8", "--pretend")
+ m3u_created = os.path.join(self.convert_dest, b"playlist.m3u8")
self.assertFalse(os.path.exists(m3u_created))
@_common.slow_test()
-class NeverConvertLossyFilesTest(_common.TestCase, TestHelper,
- ConvertCommand):
- """Test the effect of the `never_convert_lossy_files` option.
- """
+class NeverConvertLossyFilesTest(_common.TestCase, TestHelper, ConvertCommand):
+ """Test the effect of the `never_convert_lossy_files` option."""
def setUp(self):
self.setup_beets(disk=True) # Converter is threaded
- self.load_plugins('convert')
-
- self.convert_dest = os.path.join(self.temp_dir, b'convert_dest')
- self.config['convert'] = {
- 'dest': self.convert_dest,
- 'paths': {'default': 'converted'},
- 'never_convert_lossy_files': True,
- 'format': 'mp3',
- 'formats': {
- 'mp3': self.tagged_copy_cmd('mp3'),
- }
+ self.load_plugins("convert")
+
+ self.convert_dest = os.path.join(self.temp_dir, b"convert_dest")
+ self.config["convert"] = {
+ "dest": self.convert_dest,
+ "paths": {"default": "converted"},
+ "never_convert_lossy_files": True,
+ "format": "mp3",
+ "formats": {
+ "mp3": self.tagged_copy_cmd("mp3"),
+ },
}
def tearDown(self):
@@ -328,31 +333,31 @@ def tearDown(self):
self.teardown_beets()
def test_transcode_from_lossless(self):
- [item] = self.add_item_fixtures(ext='flac')
- with control_stdin('y'):
+ [item] = self.add_item_fixtures(ext="flac")
+ with control_stdin("y"):
self.run_convert_path(item.path)
- converted = os.path.join(self.convert_dest, b'converted.mp3')
- self.assertFileTag(converted, 'mp3')
+ converted = os.path.join(self.convert_dest, b"converted.mp3")
+ self.assertFileTag(converted, "mp3")
def test_transcode_from_lossy(self):
- self.config['convert']['never_convert_lossy_files'] = False
- [item] = self.add_item_fixtures(ext='ogg')
- with control_stdin('y'):
+ self.config["convert"]["never_convert_lossy_files"] = False
+ [item] = self.add_item_fixtures(ext="ogg")
+ with control_stdin("y"):
self.run_convert_path(item.path)
- converted = os.path.join(self.convert_dest, b'converted.mp3')
- self.assertFileTag(converted, 'mp3')
+ converted = os.path.join(self.convert_dest, b"converted.mp3")
+ self.assertFileTag(converted, "mp3")
def test_transcode_from_lossy_prevented(self):
- [item] = self.add_item_fixtures(ext='ogg')
- with control_stdin('y'):
+ [item] = self.add_item_fixtures(ext="ogg")
+ with control_stdin("y"):
self.run_convert_path(item.path)
- converted = os.path.join(self.convert_dest, b'converted.ogg')
- self.assertNoFileTag(converted, 'mp3')
+ converted = os.path.join(self.convert_dest, b"converted.ogg")
+ self.assertNoFileTag(converted, "mp3")
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/plugins/test_discogs.py b/test/plugins/test_discogs.py
index 25b9962b01..34de922ac0 100644
--- a/test/plugins/test_discogs.py
+++ b/test/plugins/test_discogs.py
@@ -22,7 +22,6 @@
from beets import config
from beets.util.id_extractors import extract_discogs_id_regex
-
from beetsplug.discogs import DiscogsPlugin
@@ -32,77 +31,77 @@ def _make_release(self, tracks=None):
of elements on the returned Bag is incomplete, including just
those required for the tests on this class."""
data = {
- 'id': 'ALBUM ID',
- 'uri': 'https://www.discogs.com/release/release/13633721',
- 'title': 'ALBUM TITLE',
- 'year': '3001',
- 'artists': [{
- 'name': 'ARTIST NAME',
- 'id': 'ARTIST ID',
- 'join': ','
- }],
- 'formats': [{
- 'descriptions': ['FORMAT DESC 1', 'FORMAT DESC 2'],
- 'name': 'FORMAT',
- 'qty': 1
- }],
- 'styles': [
- 'STYLE1', 'STYLE2'
+ "id": "ALBUM ID",
+ "uri": "https://www.discogs.com/release/release/13633721",
+ "title": "ALBUM TITLE",
+ "year": "3001",
+ "artists": [
+ {"name": "ARTIST NAME", "id": "ARTIST ID", "join": ","}
+ ],
+ "formats": [
+ {
+ "descriptions": ["FORMAT DESC 1", "FORMAT DESC 2"],
+ "name": "FORMAT",
+ "qty": 1,
+ }
],
- 'genres': [
- 'GENRE1', 'GENRE2'
+ "styles": ["STYLE1", "STYLE2"],
+ "genres": ["GENRE1", "GENRE2"],
+ "labels": [
+ {
+ "name": "LABEL NAME",
+ "catno": "CATALOG NUMBER",
+ }
],
- 'labels': [{
- 'name': 'LABEL NAME',
- 'catno': 'CATALOG NUMBER',
- }],
- 'tracklist': []
+ "tracklist": [],
}
if tracks:
for recording in tracks:
- data['tracklist'].append(recording)
-
- return Bag(data=data,
- # Make some fields available as properties, as they are
- # accessed by DiscogsPlugin methods.
- title=data['title'],
- artists=[Bag(data=d) for d in data['artists']])
-
- def _make_track(self, title, position='', duration='', type_=None):
- track = {
- 'title': title,
- 'position': position,
- 'duration': duration
- }
+ data["tracklist"].append(recording)
+
+ return Bag(
+ data=data,
+ # Make some fields available as properties, as they are
+ # accessed by DiscogsPlugin methods.
+ title=data["title"],
+ artists=[Bag(data=d) for d in data["artists"]],
+ )
+
+ def _make_track(self, title, position="", duration="", type_=None):
+ track = {"title": title, "position": position, "duration": duration}
if type_ is not None:
# Test samples on discogs_client do not have a 'type_' field, but
# the API seems to return it. Values: 'track' for regular tracks,
# 'heading' for descriptive texts (ie. not real tracks - 12.13.2).
- track['type_'] = type_
+ track["type_"] = type_
return track
def _make_release_from_positions(self, positions):
"""Return a Bag that mimics a discogs_client.Release with a
tracklist where tracks have the specified `positions`."""
- tracks = [self._make_track('TITLE%s' % i, position) for
- (i, position) in enumerate(positions, start=1)]
+ tracks = [
+ self._make_track("TITLE%s" % i, position)
+ for (i, position) in enumerate(positions, start=1)
+ ]
return self._make_release(tracks)
def test_parse_media_for_tracks(self):
- tracks = [self._make_track('TITLE ONE', '1', '01:01'),
- self._make_track('TITLE TWO', '2', '02:02')]
+ tracks = [
+ self._make_track("TITLE ONE", "1", "01:01"),
+ self._make_track("TITLE TWO", "2", "02:02"),
+ ]
release = self._make_release(tracks=tracks)
d = DiscogsPlugin().get_album_info(release)
t = d.tracks
- self.assertEqual(d.media, 'FORMAT')
+ self.assertEqual(d.media, "FORMAT")
self.assertEqual(t[0].media, d.media)
self.assertEqual(t[1].media, d.media)
def test_parse_medium_numbers_single_medium(self):
- release = self._make_release_from_positions(['1', '2'])
+ release = self._make_release_from_positions(["1", "2"])
d = DiscogsPlugin().get_album_info(release)
t = d.tracks
@@ -113,7 +112,7 @@ def test_parse_medium_numbers_single_medium(self):
self.assertEqual(t[0].medium_total, 2)
def test_parse_medium_numbers_two_mediums(self):
- release = self._make_release_from_positions(['1-1', '2-1'])
+ release = self._make_release_from_positions(["1-1", "2-1"])
d = DiscogsPlugin().get_album_info(release)
t = d.tracks
@@ -124,7 +123,7 @@ def test_parse_medium_numbers_two_mediums(self):
self.assertEqual(t[1].medium_total, 1)
def test_parse_medium_numbers_two_mediums_two_sided(self):
- release = self._make_release_from_positions(['A1', 'B1', 'C1'])
+ release = self._make_release_from_positions(["A1", "B1", "C1"])
d = DiscogsPlugin().get_album_info(release)
t = d.tracks
@@ -140,7 +139,7 @@ def test_parse_medium_numbers_two_mediums_two_sided(self):
self.assertEqual(t[2].medium_index, 1)
def test_parse_track_indices(self):
- release = self._make_release_from_positions(['1', '2'])
+ release = self._make_release_from_positions(["1", "2"])
d = DiscogsPlugin().get_album_info(release)
t = d.tracks
@@ -152,8 +151,9 @@ def test_parse_track_indices(self):
self.assertEqual(t[1].medium_total, 2)
def test_parse_track_indices_several_media(self):
- release = self._make_release_from_positions(['1-1', '1-2', '2-1',
- '3-1'])
+ release = self._make_release_from_positions(
+ ["1-1", "1-2", "2-1", "3-1"]
+ )
d = DiscogsPlugin().get_album_info(release)
t = d.tracks
@@ -175,17 +175,18 @@ def test_parse_position(self):
"""Test the conversion of discogs `position` to medium, medium_index
and subtrack_index."""
# List of tuples (discogs_position, (medium, medium_index, subindex)
- positions = [('1', (None, '1', None)),
- ('A12', ('A', '12', None)),
- ('12-34', ('12-', '34', None)),
- ('CD1-1', ('CD1-', '1', None)),
- ('1.12', (None, '1', '12')),
- ('12.a', (None, '12', 'A')),
- ('12.34', (None, '12', '34')),
- ('1ab', (None, '1', 'AB')),
- # Non-standard
- ('IV', ('IV', None, None)),
- ]
+ positions = [
+ ("1", (None, "1", None)),
+ ("A12", ("A", "12", None)),
+ ("12-34", ("12-", "34", None)),
+ ("CD1-1", ("CD1-", "1", None)),
+ ("1.12", (None, "1", "12")),
+ ("12.a", (None, "12", "A")),
+ ("12.34", (None, "12", "34")),
+ ("1ab", (None, "1", "AB")),
+ # Non-standard
+ ("IV", ("IV", None, None)),
+ ]
d = DiscogsPlugin()
for position, expected in positions:
@@ -193,7 +194,7 @@ def test_parse_position(self):
def test_parse_tracklist_without_sides(self):
"""Test standard Discogs position 12.2.9#1: "without sides"."""
- release = self._make_release_from_positions(['1', '2', '3'])
+ release = self._make_release_from_positions(["1", "2", "3"])
d = DiscogsPlugin().get_album_info(release)
self.assertEqual(d.mediums, 1)
@@ -201,7 +202,7 @@ def test_parse_tracklist_without_sides(self):
def test_parse_tracklist_with_sides(self):
"""Test standard Discogs position 12.2.9#2: "with sides"."""
- release = self._make_release_from_positions(['A1', 'A2', 'B1', 'B2'])
+ release = self._make_release_from_positions(["A1", "A2", "B1", "B2"])
d = DiscogsPlugin().get_album_info(release)
self.assertEqual(d.mediums, 1) # 2 sides = 1 LP
@@ -209,7 +210,7 @@ def test_parse_tracklist_with_sides(self):
def test_parse_tracklist_multiple_lp(self):
"""Test standard Discogs position 12.2.9#3: "multiple LP"."""
- release = self._make_release_from_positions(['A1', 'A2', 'B1', 'C1'])
+ release = self._make_release_from_positions(["A1", "A2", "B1", "C1"])
d = DiscogsPlugin().get_album_info(release)
self.assertEqual(d.mediums, 2) # 3 sides = 1 LP + 1 LP
@@ -217,8 +218,9 @@ def test_parse_tracklist_multiple_lp(self):
def test_parse_tracklist_multiple_cd(self):
"""Test standard Discogs position 12.2.9#4: "multiple CDs"."""
- release = self._make_release_from_positions(['1-1', '1-2', '2-1',
- '3-1'])
+ release = self._make_release_from_positions(
+ ["1-1", "1-2", "2-1", "3-1"]
+ )
d = DiscogsPlugin().get_album_info(release)
self.assertEqual(d.mediums, 3)
@@ -226,7 +228,7 @@ def test_parse_tracklist_multiple_cd(self):
def test_parse_tracklist_non_standard(self):
"""Test non standard Discogs position."""
- release = self._make_release_from_positions(['I', 'II', 'III', 'IV'])
+ release = self._make_release_from_positions(["I", "II", "III", "IV"])
d = DiscogsPlugin().get_album_info(release)
self.assertEqual(d.mediums, 1)
@@ -234,14 +236,15 @@ def test_parse_tracklist_non_standard(self):
def test_parse_tracklist_subtracks_dot(self):
"""Test standard Discogs position 12.2.9#5: "sub tracks, dots"."""
- release = self._make_release_from_positions(['1', '2.1', '2.2', '3'])
+ release = self._make_release_from_positions(["1", "2.1", "2.2", "3"])
d = DiscogsPlugin().get_album_info(release)
self.assertEqual(d.mediums, 1)
self.assertEqual(len(d.tracks), 3)
- release = self._make_release_from_positions(['A1', 'A2.1', 'A2.2',
- 'A3'])
+ release = self._make_release_from_positions(
+ ["A1", "A2.1", "A2.2", "A3"]
+ )
d = DiscogsPlugin().get_album_info(release)
self.assertEqual(d.mediums, 1)
@@ -249,14 +252,15 @@ def test_parse_tracklist_subtracks_dot(self):
def test_parse_tracklist_subtracks_letter(self):
"""Test standard Discogs position 12.2.9#5: "sub tracks, letter"."""
- release = self._make_release_from_positions(['A1', 'A2a', 'A2b', 'A3'])
+ release = self._make_release_from_positions(["A1", "A2a", "A2b", "A3"])
d = DiscogsPlugin().get_album_info(release)
self.assertEqual(d.mediums, 1)
self.assertEqual(len(d.tracks), 3)
- release = self._make_release_from_positions(['A1', 'A2.a', 'A2.b',
- 'A3'])
+ release = self._make_release_from_positions(
+ ["A1", "A2.a", "A2.b", "A3"]
+ )
d = DiscogsPlugin().get_album_info(release)
self.assertEqual(d.mediums, 1)
@@ -264,7 +268,7 @@ def test_parse_tracklist_subtracks_letter(self):
def test_parse_tracklist_subtracks_extra_material(self):
"""Test standard Discogs position 12.2.9#6: "extra material"."""
- release = self._make_release_from_positions(['1', '2', 'Video 1'])
+ release = self._make_release_from_positions(["1", "2", "Video 1"])
d = DiscogsPlugin().get_album_info(release)
self.assertEqual(d.mediums, 2)
@@ -272,82 +276,87 @@ def test_parse_tracklist_subtracks_extra_material(self):
def test_parse_tracklist_subtracks_indices(self):
"""Test parsing of subtracks that include index tracks."""
- release = self._make_release_from_positions(['', '', '1.1', '1.2'])
+ release = self._make_release_from_positions(["", "", "1.1", "1.2"])
# Track 1: Index track with medium title
- release.data['tracklist'][0]['title'] = 'MEDIUM TITLE'
+ release.data["tracklist"][0]["title"] = "MEDIUM TITLE"
# Track 2: Index track with track group title
- release.data['tracklist'][1]['title'] = 'TRACK GROUP TITLE'
+ release.data["tracklist"][1]["title"] = "TRACK GROUP TITLE"
d = DiscogsPlugin().get_album_info(release)
self.assertEqual(d.mediums, 1)
- self.assertEqual(d.tracks[0].disctitle, 'MEDIUM TITLE')
+ self.assertEqual(d.tracks[0].disctitle, "MEDIUM TITLE")
self.assertEqual(len(d.tracks), 1)
- self.assertEqual(d.tracks[0].title, 'TRACK GROUP TITLE')
+ self.assertEqual(d.tracks[0].title, "TRACK GROUP TITLE")
def test_parse_tracklist_subtracks_nested_logical(self):
"""Test parsing of subtracks defined inside a index track that are
logical subtracks (ie. should be grouped together into a single track).
"""
- release = self._make_release_from_positions(['1', '', '3'])
+ release = self._make_release_from_positions(["1", "", "3"])
# Track 2: Index track with track group title, and sub_tracks
- release.data['tracklist'][1]['title'] = 'TRACK GROUP TITLE'
- release.data['tracklist'][1]['sub_tracks'] = [
- self._make_track('TITLE ONE', '2.1', '01:01'),
- self._make_track('TITLE TWO', '2.2', '02:02')
+ release.data["tracklist"][1]["title"] = "TRACK GROUP TITLE"
+ release.data["tracklist"][1]["sub_tracks"] = [
+ self._make_track("TITLE ONE", "2.1", "01:01"),
+ self._make_track("TITLE TWO", "2.2", "02:02"),
]
d = DiscogsPlugin().get_album_info(release)
self.assertEqual(d.mediums, 1)
self.assertEqual(len(d.tracks), 3)
- self.assertEqual(d.tracks[1].title, 'TRACK GROUP TITLE')
+ self.assertEqual(d.tracks[1].title, "TRACK GROUP TITLE")
def test_parse_tracklist_subtracks_nested_physical(self):
"""Test parsing of subtracks defined inside a index track that are
physical subtracks (ie. should not be grouped together).
"""
- release = self._make_release_from_positions(['1', '', '4'])
+ release = self._make_release_from_positions(["1", "", "4"])
# Track 2: Index track with track group title, and sub_tracks
- release.data['tracklist'][1]['title'] = 'TRACK GROUP TITLE'
- release.data['tracklist'][1]['sub_tracks'] = [
- self._make_track('TITLE ONE', '2', '01:01'),
- self._make_track('TITLE TWO', '3', '02:02')
+ release.data["tracklist"][1]["title"] = "TRACK GROUP TITLE"
+ release.data["tracklist"][1]["sub_tracks"] = [
+ self._make_track("TITLE ONE", "2", "01:01"),
+ self._make_track("TITLE TWO", "3", "02:02"),
]
d = DiscogsPlugin().get_album_info(release)
self.assertEqual(d.mediums, 1)
self.assertEqual(len(d.tracks), 4)
- self.assertEqual(d.tracks[1].title, 'TITLE ONE')
- self.assertEqual(d.tracks[2].title, 'TITLE TWO')
+ self.assertEqual(d.tracks[1].title, "TITLE ONE")
+ self.assertEqual(d.tracks[2].title, "TITLE TWO")
def test_parse_tracklist_disctitles(self):
"""Test parsing of index tracks that act as disc titles."""
- release = self._make_release_from_positions(['', '1-1', '1-2', '',
- '2-1'])
+ release = self._make_release_from_positions(
+ ["", "1-1", "1-2", "", "2-1"]
+ )
# Track 1: Index track with medium title (Cd1)
- release.data['tracklist'][0]['title'] = 'MEDIUM TITLE CD1'
+ release.data["tracklist"][0]["title"] = "MEDIUM TITLE CD1"
# Track 4: Index track with medium title (Cd2)
- release.data['tracklist'][3]['title'] = 'MEDIUM TITLE CD2'
+ release.data["tracklist"][3]["title"] = "MEDIUM TITLE CD2"
d = DiscogsPlugin().get_album_info(release)
self.assertEqual(d.mediums, 2)
- self.assertEqual(d.tracks[0].disctitle, 'MEDIUM TITLE CD1')
- self.assertEqual(d.tracks[1].disctitle, 'MEDIUM TITLE CD1')
- self.assertEqual(d.tracks[2].disctitle, 'MEDIUM TITLE CD2')
+ self.assertEqual(d.tracks[0].disctitle, "MEDIUM TITLE CD1")
+ self.assertEqual(d.tracks[1].disctitle, "MEDIUM TITLE CD1")
+ self.assertEqual(d.tracks[2].disctitle, "MEDIUM TITLE CD2")
self.assertEqual(len(d.tracks), 3)
def test_parse_minimal_release(self):
"""Test parsing of a release with the minimal amount of information."""
- data = {'id': 123,
- 'uri': 'https://www.discogs.com/release/123456-something',
- 'tracklist': [self._make_track('A', '1', '01:01')],
- 'artists': [{'name': 'ARTIST NAME', 'id': 321, 'join': ''}],
- 'title': 'TITLE'}
- release = Bag(data=data,
- title=data['title'],
- artists=[Bag(data=d) for d in data['artists']])
+ data = {
+ "id": 123,
+ "uri": "https://www.discogs.com/release/123456-something",
+ "tracklist": [self._make_track("A", "1", "01:01")],
+ "artists": [{"name": "ARTIST NAME", "id": 321, "join": ""}],
+ "title": "TITLE",
+ }
+ release = Bag(
+ data=data,
+ title=data["title"],
+ artists=[Bag(data=d) for d in data["artists"]],
+ )
d = DiscogsPlugin().get_album_info(release)
- self.assertEqual(d.artist, 'ARTIST NAME')
- self.assertEqual(d.album, 'TITLE')
+ self.assertEqual(d.artist, "ARTIST NAME")
+ self.assertEqual(d.album, "TITLE")
self.assertEqual(len(d.tracks), 1)
def test_parse_release_without_required_fields(self):
@@ -357,51 +366,64 @@ def test_parse_release_without_required_fields(self):
d = DiscogsPlugin().get_album_info(release)
self.assertEqual(d, None)
- self.assertIn('Release does not contain the required fields', logs[0])
+ self.assertIn("Release does not contain the required fields", logs[0])
def test_album_for_id(self):
"""Test parsing for a valid Discogs release_id"""
- test_patterns = [('http://www.discogs.com/G%C3%BCnther-Lause-Meru-Ep/release/4354798', 4354798), # NOQA E501
- ('http://www.discogs.com/release/4354798-G%C3%BCnther-Lause-Meru-Ep', 4354798), # NOQA E501
- ('http://www.discogs.com/G%C3%BCnther-4354798Lause-Meru-Ep/release/4354798', 4354798), # NOQA E501
- ('http://www.discogs.com/release/4354798-G%C3%BCnther-4354798Lause-Meru-Ep/', 4354798), # NOQA E501
- ('[r4354798]', 4354798),
- ('r4354798', 4354798),
- ('4354798', 4354798),
- ('yet-another-metadata-provider.org/foo/12345', ''),
- ('005b84a0-ecd6-39f1-b2f6-6eb48756b268', ''),
- ]
+ test_patterns = [
+ (
+ "http://www.discogs.com/G%C3%BCnther-Lause-Meru-Ep/release/4354798",
+ 4354798,
+ ), # NOQA E501
+ (
+ "http://www.discogs.com/release/4354798-G%C3%BCnther-Lause-Meru-Ep",
+ 4354798,
+ ), # NOQA E501
+ (
+ "http://www.discogs.com/G%C3%BCnther-4354798Lause-Meru-Ep/release/4354798",
+ 4354798,
+ ), # NOQA E501
+ (
+ "http://www.discogs.com/release/4354798-G%C3%BCnther-4354798Lause-Meru-Ep/",
+ 4354798,
+ ), # NOQA E501
+ ("[r4354798]", 4354798),
+ ("r4354798", 4354798),
+ ("4354798", 4354798),
+ ("yet-another-metadata-provider.org/foo/12345", ""),
+ ("005b84a0-ecd6-39f1-b2f6-6eb48756b268", ""),
+ ]
for test_pattern, expected in test_patterns:
match = extract_discogs_id_regex(test_pattern)
if not match:
- match = ''
+ match = ""
self.assertEqual(match, expected)
def test_default_genre_style_settings(self):
"""Test genre default settings, genres to genre, styles to style"""
- release = self._make_release_from_positions(['1', '2'])
+ release = self._make_release_from_positions(["1", "2"])
d = DiscogsPlugin().get_album_info(release)
- self.assertEqual(d.genre, 'GENRE1, GENRE2')
- self.assertEqual(d.style, 'STYLE1, STYLE2')
+ self.assertEqual(d.genre, "GENRE1, GENRE2")
+ self.assertEqual(d.style, "STYLE1, STYLE2")
def test_append_style_to_genre(self):
"""Test appending style to genre if config enabled"""
- config['discogs']['append_style_genre'] = True
- release = self._make_release_from_positions(['1', '2'])
+ config["discogs"]["append_style_genre"] = True
+ release = self._make_release_from_positions(["1", "2"])
d = DiscogsPlugin().get_album_info(release)
- self.assertEqual(d.genre, 'GENRE1, GENRE2, STYLE1, STYLE2')
- self.assertEqual(d.style, 'STYLE1, STYLE2')
+ self.assertEqual(d.genre, "GENRE1, GENRE2, STYLE1, STYLE2")
+ self.assertEqual(d.style, "STYLE1, STYLE2")
def test_append_style_to_genre_no_style(self):
"""Test nothing appended to genre if style is empty"""
- config['discogs']['append_style_genre'] = True
- release = self._make_release_from_positions(['1', '2'])
- release.data['styles'] = []
+ config["discogs"]["append_style_genre"] = True
+ release = self._make_release_from_positions(["1", "2"])
+ release.data["styles"] = []
d = DiscogsPlugin().get_album_info(release)
- self.assertEqual(d.genre, 'GENRE1, GENRE2')
+ self.assertEqual(d.genre, "GENRE1, GENRE2")
self.assertEqual(d.style, None)
@@ -409,5 +431,5 @@ def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/plugins/test_edit.py b/test/plugins/test_edit.py
index ad43ca8390..04af561175 100644
--- a/test/plugins/test_edit.py
+++ b/test/plugins/test_edit.py
@@ -14,12 +14,12 @@
import codecs
import unittest
-
-from unittest.mock import patch
from test import _common
from test.helper import TestHelper, control_stdin
+from test.test_importer import AutotagStub, ImportHelper
from test.test_ui_importer import TerminalImportSessionSetup
-from test.test_importer import ImportHelper, AutotagStub
+from unittest.mock import patch
+
from beets.dbcore.query import TrueQuery
from beets.library import Item
from beetsplug.edit import EditPlugin
@@ -31,7 +31,7 @@ class ModifyFileMocker:
"""
def __init__(self, contents=None, replacements=None):
- """ `self.contents` and `self.replacements` are initialized here, in
+ """`self.contents` and `self.replacements` are initialized here, in
order to keep the rest of the functions of this class with the same
signature as `EditPlugin.get_editor()`, making mocking easier.
- `contents`: string with the contents of the file to be used for
@@ -54,25 +54,27 @@ def overwrite_contents(self, filename, log):
`self.contents` is empty, the file remains unchanged.
"""
if self.contents:
- with codecs.open(filename, 'w', encoding='utf-8') as f:
+ with codecs.open(filename, "w", encoding="utf-8") as f:
f.write(self.contents)
def replace_contents(self, filename, log):
"""Modify `filename`, reading its contents and replacing the strings
specified in `self.replacements`.
"""
- with codecs.open(filename, 'r', encoding='utf-8') as f:
+ with codecs.open(filename, "r", encoding="utf-8") as f:
contents = f.read()
for old, new_ in self.replacements.items():
contents = contents.replace(old, new_)
- with codecs.open(filename, 'w', encoding='utf-8') as f:
+ with codecs.open(filename, "w", encoding="utf-8") as f:
f.write(contents)
class EditMixin:
"""Helper containing some common functionality used for the Edit tests."""
- def assertItemFieldsModified(self, library_items, items, fields=[], # noqa
- allowed=['path']):
+
+ def assertItemFieldsModified(
+ self, library_items, items, fields=[], allowed=["path"] # noqa
+ ):
"""Assert that items in the library (`lib_items`) have different values
on the specified `fields` (and *only* on those fields), compared to
`items`.
@@ -82,111 +84,138 @@ def assertItemFieldsModified(self, library_items, items, fields=[], # noqa
(they may or may not have changed; the assertion doesn't care).
"""
for lib_item, item in zip(library_items, items):
- diff_fields = [field for field in lib_item._fields
- if lib_item[field] != item[field]]
- self.assertEqual(set(diff_fields).difference(allowed),
- set(fields))
+ diff_fields = [
+ field
+ for field in lib_item._fields
+ if lib_item[field] != item[field]
+ ]
+ self.assertEqual(set(diff_fields).difference(allowed), set(fields))
def run_mocked_interpreter(self, modify_file_args={}, stdin=[]):
"""Run the edit command during an import session, with mocked stdin and
yaml writing.
"""
m = ModifyFileMocker(**modify_file_args)
- with patch('beetsplug.edit.edit', side_effect=m.action):
- with control_stdin('\n'.join(stdin)):
+ with patch("beetsplug.edit.edit", side_effect=m.action):
+ with control_stdin("\n".join(stdin)):
self.importer.run()
def run_mocked_command(self, modify_file_args={}, stdin=[], args=[]):
"""Run the edit command, with mocked stdin and yaml writing, and
passing `args` to `run_command`."""
m = ModifyFileMocker(**modify_file_args)
- with patch('beetsplug.edit.edit', side_effect=m.action):
- with control_stdin('\n'.join(stdin)):
- self.run_command('edit', *args)
+ with patch("beetsplug.edit.edit", side_effect=m.action):
+ with control_stdin("\n".join(stdin)):
+ self.run_command("edit", *args)
@_common.slow_test()
-@patch('beets.library.Item.write')
+@patch("beets.library.Item.write")
class EditCommandTest(unittest.TestCase, TestHelper, EditMixin):
"""Black box tests for `beetsplug.edit`. Command line interaction is
simulated using `test.helper.control_stdin()`, and yaml editing via an
external editor is simulated using `ModifyFileMocker`.
"""
+
ALBUM_COUNT = 1
TRACK_COUNT = 10
def setUp(self):
self.setup_beets()
- self.load_plugins('edit')
+ self.load_plugins("edit")
# Add an album, storing the original fields for comparison.
self.album = self.add_album_fixture(track_count=self.TRACK_COUNT)
self.album_orig = {f: self.album[f] for f in self.album._fields}
- self.items_orig = [{f: item[f] for f in item._fields} for
- item in self.album.items()]
+ self.items_orig = [
+ {f: item[f] for f in item._fields} for item in self.album.items()
+ ]
def tearDown(self):
EditPlugin.listeners = None
self.teardown_beets()
self.unload_plugins()
- def assertCounts(self, mock_write, album_count=ALBUM_COUNT, track_count=TRACK_COUNT, # noqa
- write_call_count=TRACK_COUNT, title_starts_with=''):
+ def assertCounts(
+ self,
+ mock_write,
+ album_count=ALBUM_COUNT,
+ track_count=TRACK_COUNT, # noqa
+ write_call_count=TRACK_COUNT,
+ title_starts_with="",
+ ):
"""Several common assertions on Album, Track and call counts."""
self.assertEqual(len(self.lib.albums()), album_count)
self.assertEqual(len(self.lib.items()), track_count)
self.assertEqual(mock_write.call_count, write_call_count)
- self.assertTrue(all(i.title.startswith(title_starts_with)
- for i in self.lib.items()))
+ self.assertTrue(
+ all(i.title.startswith(title_starts_with) for i in self.lib.items())
+ )
def test_title_edit_discard(self, mock_write):
"""Edit title for all items in the library, then discard changes."""
# Edit track titles.
- self.run_mocked_command({'replacements': {'t\u00eftle':
- 'modified t\u00eftle'}},
- # Cancel.
- ['c'])
-
- self.assertCounts(mock_write, write_call_count=0,
- title_starts_with='t\u00eftle')
+ self.run_mocked_command(
+ {"replacements": {"t\u00eftle": "modified t\u00eftle"}},
+ # Cancel.
+ ["c"],
+ )
+
+ self.assertCounts(
+ mock_write, write_call_count=0, title_starts_with="t\u00eftle"
+ )
self.assertItemFieldsModified(self.album.items(), self.items_orig, [])
def test_title_edit_apply(self, mock_write):
"""Edit title for all items in the library, then apply changes."""
# Edit track titles.
- self.run_mocked_command({'replacements': {'t\u00eftle':
- 'modified t\u00eftle'}},
- # Apply changes.
- ['a'])
-
- self.assertCounts(mock_write, write_call_count=self.TRACK_COUNT,
- title_starts_with='modified t\u00eftle')
- self.assertItemFieldsModified(self.album.items(), self.items_orig,
- ['title', 'mtime'])
+ self.run_mocked_command(
+ {"replacements": {"t\u00eftle": "modified t\u00eftle"}},
+ # Apply changes.
+ ["a"],
+ )
+
+ self.assertCounts(
+ mock_write,
+ write_call_count=self.TRACK_COUNT,
+ title_starts_with="modified t\u00eftle",
+ )
+ self.assertItemFieldsModified(
+ self.album.items(), self.items_orig, ["title", "mtime"]
+ )
def test_single_title_edit_apply(self, mock_write):
"""Edit title for one item in the library, then apply changes."""
# Edit one track title.
- self.run_mocked_command({'replacements': {'t\u00eftle 9':
- 'modified t\u00eftle 9'}},
- # Apply changes.
- ['a'])
-
- self.assertCounts(mock_write, write_call_count=1,)
+ self.run_mocked_command(
+ {"replacements": {"t\u00eftle 9": "modified t\u00eftle 9"}},
+ # Apply changes.
+ ["a"],
+ )
+
+ self.assertCounts(
+ mock_write,
+ write_call_count=1,
+ )
# No changes except on last item.
- self.assertItemFieldsModified(list(self.album.items())[:-1],
- self.items_orig[:-1], [])
- self.assertEqual(list(self.album.items())[-1].title,
- 'modified t\u00eftle 9')
+ self.assertItemFieldsModified(
+ list(self.album.items())[:-1], self.items_orig[:-1], []
+ )
+ self.assertEqual(
+ list(self.album.items())[-1].title, "modified t\u00eftle 9"
+ )
def test_noedit(self, mock_write):
"""Do not edit anything."""
# Do not edit anything.
- self.run_mocked_command({'contents': None},
- # No stdin.
- [])
-
- self.assertCounts(mock_write, write_call_count=0,
- title_starts_with='t\u00eftle')
+ self.run_mocked_command(
+ {"contents": None},
+ # No stdin.
+ [],
+ )
+
+ self.assertCounts(
+ mock_write, write_call_count=0, title_starts_with="t\u00eftle"
+ )
self.assertItemFieldsModified(self.album.items(), self.items_orig, [])
def test_album_edit_apply(self, mock_write):
@@ -194,102 +223,121 @@ def test_album_edit_apply(self, mock_write):
By design, the album should not be updated.""
"""
# Edit album.
- self.run_mocked_command({'replacements': {'\u00e4lbum':
- 'modified \u00e4lbum'}},
- # Apply changes.
- ['a'])
+ self.run_mocked_command(
+ {"replacements": {"\u00e4lbum": "modified \u00e4lbum"}},
+ # Apply changes.
+ ["a"],
+ )
self.assertCounts(mock_write, write_call_count=self.TRACK_COUNT)
- self.assertItemFieldsModified(self.album.items(), self.items_orig,
- ['album', 'mtime'])
+ self.assertItemFieldsModified(
+ self.album.items(), self.items_orig, ["album", "mtime"]
+ )
# Ensure album is *not* modified.
self.album.load()
- self.assertEqual(self.album.album, '\u00e4lbum')
+ self.assertEqual(self.album.album, "\u00e4lbum")
def test_single_edit_add_field(self, mock_write):
"""Edit the yaml file appending an extra field to the first item, then
apply changes."""
# Append "foo: bar" to item with id == 2. ("id: 1" would match both
# "id: 1" and "id: 10")
- self.run_mocked_command({'replacements': {"id: 2":
- "id: 2\nfoo: bar"}},
- # Apply changes.
- ['a'])
+ self.run_mocked_command(
+ {"replacements": {"id: 2": "id: 2\nfoo: bar"}},
+ # Apply changes.
+ ["a"],
+ )
- self.assertEqual(self.lib.items('id:2')[0].foo, 'bar')
+ self.assertEqual(self.lib.items("id:2")[0].foo, "bar")
# Even though a flexible attribute was written (which is not directly
# written to the tags), write should still be called since templates
# might use it.
- self.assertCounts(mock_write, write_call_count=1,
- title_starts_with='t\u00eftle')
+ self.assertCounts(
+ mock_write, write_call_count=1, title_starts_with="t\u00eftle"
+ )
def test_a_album_edit_apply(self, mock_write):
"""Album query (-a), edit album field, apply changes."""
- self.run_mocked_command({'replacements': {'\u00e4lbum':
- 'modified \u00e4lbum'}},
- # Apply changes.
- ['a'],
- args=['-a'])
+ self.run_mocked_command(
+ {"replacements": {"\u00e4lbum": "modified \u00e4lbum"}},
+ # Apply changes.
+ ["a"],
+ args=["-a"],
+ )
self.album.load()
self.assertCounts(mock_write, write_call_count=self.TRACK_COUNT)
- self.assertEqual(self.album.album, 'modified \u00e4lbum')
- self.assertItemFieldsModified(self.album.items(), self.items_orig,
- ['album', 'mtime'])
+ self.assertEqual(self.album.album, "modified \u00e4lbum")
+ self.assertItemFieldsModified(
+ self.album.items(), self.items_orig, ["album", "mtime"]
+ )
def test_a_albumartist_edit_apply(self, mock_write):
"""Album query (-a), edit albumartist field, apply changes."""
- self.run_mocked_command({'replacements': {'album artist':
- 'modified album artist'}},
- # Apply changes.
- ['a'],
- args=['-a'])
+ self.run_mocked_command(
+ {"replacements": {"album artist": "modified album artist"}},
+ # Apply changes.
+ ["a"],
+ args=["-a"],
+ )
self.album.load()
self.assertCounts(mock_write, write_call_count=self.TRACK_COUNT)
- self.assertEqual(self.album.albumartist, 'the modified album artist')
- self.assertItemFieldsModified(self.album.items(), self.items_orig,
- ['albumartist', 'mtime'])
+ self.assertEqual(self.album.albumartist, "the modified album artist")
+ self.assertItemFieldsModified(
+ self.album.items(), self.items_orig, ["albumartist", "mtime"]
+ )
def test_malformed_yaml(self, mock_write):
"""Edit the yaml file incorrectly (resulting in a malformed yaml
document)."""
# Edit the yaml file to an invalid file.
- self.run_mocked_command({'contents': '!MALFORMED'},
- # Edit again to fix? No.
- ['n'])
+ self.run_mocked_command(
+ {"contents": "!MALFORMED"},
+ # Edit again to fix? No.
+ ["n"],
+ )
- self.assertCounts(mock_write, write_call_count=0,
- title_starts_with='t\u00eftle')
+ self.assertCounts(
+ mock_write, write_call_count=0, title_starts_with="t\u00eftle"
+ )
def test_invalid_yaml(self, mock_write):
"""Edit the yaml file incorrectly (resulting in a well-formed but
invalid yaml document)."""
# Edit the yaml file to an invalid but parseable file.
- self.run_mocked_command({'contents': 'wellformed: yes, but invalid'},
- # No stdin.
- [])
+ self.run_mocked_command(
+ {"contents": "wellformed: yes, but invalid"},
+ # No stdin.
+ [],
+ )
- self.assertCounts(mock_write, write_call_count=0,
- title_starts_with='t\u00eftle')
+ self.assertCounts(
+ mock_write, write_call_count=0, title_starts_with="t\u00eftle"
+ )
@_common.slow_test()
-class EditDuringImporterTest(TerminalImportSessionSetup, unittest.TestCase,
- ImportHelper, TestHelper, EditMixin):
- """TODO
- """
- IGNORED = ['added', 'album_id', 'id', 'mtime', 'path']
+class EditDuringImporterTest(
+ TerminalImportSessionSetup,
+ unittest.TestCase,
+ ImportHelper,
+ TestHelper,
+ EditMixin,
+):
+ """TODO"""
+
+ IGNORED = ["added", "album_id", "id", "mtime", "path"]
def setUp(self):
self.setup_beets()
- self.load_plugins('edit')
+ self.load_plugins("edit")
# Create some mediafiles, and store them for comparison.
self._create_import_dir(3)
self.items_orig = [Item.from_path(f.path) for f in self.media_files]
self.matcher = AutotagStub().install()
self.matcher.matching = AutotagStub.GOOD
- self.config['import']['timid'] = True
+ self.config["import"]["timid"] = True
def tearDown(self):
EditPlugin.listeners = None
@@ -303,23 +351,30 @@ def test_edit_apply_asis(self):
"""
self._setup_import_session()
# Edit track titles.
- self.run_mocked_interpreter({'replacements': {'Tag Title':
- 'Edited Title'}},
- # eDit, Apply changes.
- ['d', 'a'])
+ self.run_mocked_interpreter(
+ {"replacements": {"Tag Title": "Edited Title"}},
+ # eDit, Apply changes.
+ ["d", "a"],
+ )
# Check that only the 'title' field is modified.
- self.assertItemFieldsModified(self.lib.items(), self.items_orig,
- ['title'],
- self.IGNORED + ['albumartist',
- 'mb_albumartistid',
- 'mb_albumartistids',
- ])
- self.assertTrue(all('Edited Title' in i.title
- for i in self.lib.items()))
+ self.assertItemFieldsModified(
+ self.lib.items(),
+ self.items_orig,
+ ["title"],
+ self.IGNORED
+ + [
+ "albumartist",
+ "mb_albumartistid",
+ "mb_albumartistids",
+ ],
+ )
+ self.assertTrue(
+ all("Edited Title" in i.title for i in self.lib.items())
+ )
# Ensure album is *not* fetched from a candidate.
- self.assertEqual(self.lib.albums()[0].mb_albumid, '')
+ self.assertEqual(self.lib.albums()[0].mb_albumid, "")
def test_edit_discard_asis(self):
"""Edit the album field for all items in the library, discard changes,
@@ -327,21 +382,23 @@ def test_edit_discard_asis(self):
"""
self._setup_import_session()
# Edit track titles.
- self.run_mocked_interpreter({'replacements': {'Tag Title':
- 'Edited Title'}},
- # eDit, Cancel, Use as-is.
- ['d', 'c', 'u'])
+ self.run_mocked_interpreter(
+ {"replacements": {"Tag Title": "Edited Title"}},
+ # eDit, Cancel, Use as-is.
+ ["d", "c", "u"],
+ )
# Check that nothing is modified, the album is imported ASIS.
- self.assertItemFieldsModified(self.lib.items(), self.items_orig,
- [],
- self.IGNORED + ['albumartist',
- 'mb_albumartistid'])
- self.assertTrue(all('Tag Title' in i.title
- for i in self.lib.items()))
+ self.assertItemFieldsModified(
+ self.lib.items(),
+ self.items_orig,
+ [],
+ self.IGNORED + ["albumartist", "mb_albumartistid"],
+ )
+ self.assertTrue(all("Tag Title" in i.title for i in self.lib.items()))
# Ensure album is *not* fetched from a candidate.
- self.assertEqual(self.lib.albums()[0].mb_albumid, '')
+ self.assertEqual(self.lib.albums()[0].mb_albumid, "")
def test_edit_apply_candidate(self):
"""Edit the album field for all items in the library, apply changes,
@@ -349,48 +406,52 @@ def test_edit_apply_candidate(self):
"""
self._setup_import_session()
# Edit track titles.
- self.run_mocked_interpreter({'replacements': {'Applied Title':
- 'Edited Title'}},
- # edit Candidates, 1, Apply changes.
- ['c', '1', 'a'])
+ self.run_mocked_interpreter(
+ {"replacements": {"Applied Title": "Edited Title"}},
+ # edit Candidates, 1, Apply changes.
+ ["c", "1", "a"],
+ )
# Check that 'title' field is modified, and other fields come from
# the candidate.
- self.assertTrue(all('Edited Title ' in i.title
- for i in self.lib.items()))
- self.assertTrue(all('match ' in i.mb_trackid
- for i in self.lib.items()))
+ self.assertTrue(
+ all("Edited Title " in i.title for i in self.lib.items())
+ )
+ self.assertTrue(all("match " in i.mb_trackid for i in self.lib.items()))
# Ensure album is fetched from a candidate.
- self.assertIn('albumid', self.lib.albums()[0].mb_albumid)
+ self.assertIn("albumid", self.lib.albums()[0].mb_albumid)
def test_edit_retag_apply(self):
"""Import the album using a candidate, then retag and edit and apply
changes.
"""
self._setup_import_session()
- self.run_mocked_interpreter({},
- # 1, Apply changes.
- ['1', 'a'])
+ self.run_mocked_interpreter(
+ {},
+ # 1, Apply changes.
+ ["1", "a"],
+ )
# Retag and edit track titles. On retag, the importer will reset items
# ids but not the db connections.
self.importer.paths = []
self.importer.query = TrueQuery()
- self.run_mocked_interpreter({'replacements': {'Applied Title':
- 'Edited Title'}},
- # eDit, Apply changes.
- ['d', 'a'])
+ self.run_mocked_interpreter(
+ {"replacements": {"Applied Title": "Edited Title"}},
+ # eDit, Apply changes.
+ ["d", "a"],
+ )
# Check that 'title' field is modified, and other fields come from
# the candidate.
- self.assertTrue(all('Edited Title ' in i.title
- for i in self.lib.items()))
- self.assertTrue(all('match ' in i.mb_trackid
- for i in self.lib.items()))
+ self.assertTrue(
+ all("Edited Title " in i.title for i in self.lib.items())
+ )
+ self.assertTrue(all("match " in i.mb_trackid for i in self.lib.items()))
# Ensure album is fetched from a candidate.
- self.assertIn('albumid', self.lib.albums()[0].mb_albumid)
+ self.assertIn("albumid", self.lib.albums()[0].mb_albumid)
def test_edit_discard_candidate(self):
"""Edit the album field for all items in the library, discard changes,
@@ -398,20 +459,21 @@ def test_edit_discard_candidate(self):
"""
self._setup_import_session()
# Edit track titles.
- self.run_mocked_interpreter({'replacements': {'Applied Title':
- 'Edited Title'}},
- # edit Candidates, 1, Apply changes.
- ['c', '1', 'a'])
+ self.run_mocked_interpreter(
+ {"replacements": {"Applied Title": "Edited Title"}},
+ # edit Candidates, 1, Apply changes.
+ ["c", "1", "a"],
+ )
# Check that 'title' field is modified, and other fields come from
# the candidate.
- self.assertTrue(all('Edited Title ' in i.title
- for i in self.lib.items()))
- self.assertTrue(all('match ' in i.mb_trackid
- for i in self.lib.items()))
+ self.assertTrue(
+ all("Edited Title " in i.title for i in self.lib.items())
+ )
+ self.assertTrue(all("match " in i.mb_trackid for i in self.lib.items()))
# Ensure album is fetched from a candidate.
- self.assertIn('albumid', self.lib.albums()[0].mb_albumid)
+ self.assertIn("albumid", self.lib.albums()[0].mb_albumid)
def test_edit_apply_asis_singleton(self):
"""Edit the album field for all items in the library, apply changes,
@@ -419,18 +481,22 @@ def test_edit_apply_asis_singleton(self):
"""
self._setup_import_session(singletons=True)
# Edit track titles.
- self.run_mocked_interpreter({'replacements': {'Tag Title':
- 'Edited Title'}},
- # eDit, Apply changes, aBort.
- ['d', 'a', 'b'])
+ self.run_mocked_interpreter(
+ {"replacements": {"Tag Title": "Edited Title"}},
+ # eDit, Apply changes, aBort.
+ ["d", "a", "b"],
+ )
# Check that only the 'title' field is modified.
- self.assertItemFieldsModified(self.lib.items(), self.items_orig,
- ['title'],
- self.IGNORED + ['albumartist',
- 'mb_albumartistid'])
- self.assertTrue(all('Edited Title' in i.title
- for i in self.lib.items()))
+ self.assertItemFieldsModified(
+ self.lib.items(),
+ self.items_orig,
+ ["title"],
+ self.IGNORED + ["albumartist", "mb_albumartistid"],
+ )
+ self.assertTrue(
+ all("Edited Title" in i.title for i in self.lib.items())
+ )
def test_edit_apply_candidate_singleton(self):
"""Edit the album field for all items in the library, apply changes,
@@ -438,21 +504,23 @@ def test_edit_apply_candidate_singleton(self):
"""
self._setup_import_session()
# Edit track titles.
- self.run_mocked_interpreter({'replacements': {'Applied Title':
- 'Edited Title'}},
- # edit Candidates, 1, Apply changes, aBort.
- ['c', '1', 'a', 'b'])
+ self.run_mocked_interpreter(
+ {"replacements": {"Applied Title": "Edited Title"}},
+ # edit Candidates, 1, Apply changes, aBort.
+ ["c", "1", "a", "b"],
+ )
# Check that 'title' field is modified, and other fields come from
# the candidate.
- self.assertTrue(all('Edited Title ' in i.title
- for i in self.lib.items()))
- self.assertTrue(all('match ' in i.mb_trackid
- for i in self.lib.items()))
+ self.assertTrue(
+ all("Edited Title " in i.title for i in self.lib.items())
+ )
+ self.assertTrue(all("match " in i.mb_trackid for i in self.lib.items()))
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/plugins/test_embedart.py b/test/plugins/test_embedart.py
index 408655bb93..55dd18dc31 100644
--- a/test/plugins/test_embedart.py
+++ b/test/plugins/test_embedart.py
@@ -15,24 +15,22 @@
import os.path
import shutil
-from unittest.mock import patch, MagicMock
import tempfile
import unittest
-
from test import _common
from test.helper import TestHelper
+from test.plugins.test_art import FetchImageHelper
from test.test_art_resize import DummyIMBackend
+from unittest.mock import MagicMock, patch
from mediafile import MediaFile
-from beets import config, logging, ui
+
+from beets import art, config, logging, ui
from beets.util import bytestring_path, displayable_path, syspath
from beets.util.artresizer import ArtResizer
-from beets import art
-from test.plugins.test_art import FetchImageHelper
def require_artresizer_compare(test):
-
def wrapper(*args, **kwargs):
if not ArtResizer.shared.can_compare:
raise unittest.SkipTest("compare not available")
@@ -44,22 +42,21 @@ def wrapper(*args, **kwargs):
class EmbedartCliTest(TestHelper, FetchImageHelper):
-
- small_artpath = os.path.join(_common.RSRC, b'image-2x3.jpg')
- abbey_artpath = os.path.join(_common.RSRC, b'abbey.jpg')
- abbey_similarpath = os.path.join(_common.RSRC, b'abbey-similar.jpg')
- abbey_differentpath = os.path.join(_common.RSRC, b'abbey-different.jpg')
+ small_artpath = os.path.join(_common.RSRC, b"image-2x3.jpg")
+ abbey_artpath = os.path.join(_common.RSRC, b"abbey.jpg")
+ abbey_similarpath = os.path.join(_common.RSRC, b"abbey-similar.jpg")
+ abbey_differentpath = os.path.join(_common.RSRC, b"abbey-different.jpg")
def setUp(self):
super().setUp()
self.io.install()
self.setup_beets() # Converter is threaded
- self.load_plugins('embedart')
+ self.load_plugins("embedart")
def _setup_data(self, artpath=None):
if not artpath:
artpath = self.small_artpath
- with open(syspath(artpath), 'rb') as f:
+ with open(syspath(artpath), "rb") as f:
self.image_data = f.read()
def tearDown(self):
@@ -70,8 +67,8 @@ def test_embed_art_from_file_with_yes_input(self):
self._setup_data()
album = self.add_album_fixture()
item = album.items()[0]
- self.io.addinput('y')
- self.run_command('embedart', '-f', self.small_artpath)
+ self.io.addinput("y")
+ self.run_command("embedart", "-f", self.small_artpath)
mediafile = MediaFile(syspath(item.path))
self.assertEqual(mediafile.images[0].data, self.image_data)
@@ -79,8 +76,8 @@ def test_embed_art_from_file_with_no_input(self):
self._setup_data()
album = self.add_album_fixture()
item = album.items()[0]
- self.io.addinput('n')
- self.run_command('embedart', '-f', self.small_artpath)
+ self.io.addinput("n")
+ self.run_command("embedart", "-f", self.small_artpath)
mediafile = MediaFile(syspath(item.path))
# make sure that images array is empty (nothing embedded)
self.assertFalse(mediafile.images)
@@ -89,7 +86,7 @@ def test_embed_art_from_file(self):
self._setup_data()
album = self.add_album_fixture()
item = album.items()[0]
- self.run_command('embedart', '-y', '-f', self.small_artpath)
+ self.run_command("embedart", "-y", "-f", self.small_artpath)
mediafile = MediaFile(syspath(item.path))
self.assertEqual(mediafile.images[0].data, self.image_data)
@@ -99,7 +96,7 @@ def test_embed_art_from_album(self):
item = album.items()[0]
album.artpath = self.small_artpath
album.store()
- self.run_command('embedart', '-y')
+ self.run_command("embedart", "-y")
mediafile = MediaFile(syspath(item.path))
self.assertEqual(mediafile.images[0].data, self.image_data)
@@ -107,7 +104,7 @@ def test_embed_art_remove_art_file(self):
self._setup_data()
album = self.add_album_fixture()
- logging.getLogger('beets.embedart').setLevel(logging.DEBUG)
+ logging.getLogger("beets.embedart").setLevel(logging.DEBUG)
handle, tmp_path = tempfile.mkstemp()
tmp_path = bytestring_path(tmp_path)
@@ -117,32 +114,34 @@ def test_embed_art_remove_art_file(self):
album.artpath = tmp_path
album.store()
- config['embedart']['remove_art_file'] = True
- self.run_command('embedart', '-y')
+ config["embedart"]["remove_art_file"] = True
+ self.run_command("embedart", "-y")
if os.path.isfile(syspath(tmp_path)):
os.remove(syspath(tmp_path))
- self.fail('Artwork file {} was not deleted'.format(
- displayable_path(tmp_path)
- ))
+ self.fail(
+ "Artwork file {} was not deleted".format(
+ displayable_path(tmp_path)
+ )
+ )
def test_art_file_missing(self):
self.add_album_fixture()
- logging.getLogger('beets.embedart').setLevel(logging.DEBUG)
+ logging.getLogger("beets.embedart").setLevel(logging.DEBUG)
with self.assertRaises(ui.UserError):
- self.run_command('embedart', '-y', '-f', '/doesnotexist')
+ self.run_command("embedart", "-y", "-f", "/doesnotexist")
def test_embed_non_image_file(self):
album = self.add_album_fixture()
- logging.getLogger('beets.embedart').setLevel(logging.DEBUG)
+ logging.getLogger("beets.embedart").setLevel(logging.DEBUG)
handle, tmp_path = tempfile.mkstemp()
tmp_path = bytestring_path(tmp_path)
- os.write(handle, b'I am not an image.')
+ os.write(handle, b"I am not an image.")
os.close(handle)
try:
- self.run_command('embedart', '-y', '-f', tmp_path)
+ self.run_command("embedart", "-y", "-f", tmp_path)
finally:
os.remove(syspath(tmp_path))
@@ -154,59 +153,67 @@ def test_reject_different_art(self):
self._setup_data(self.abbey_artpath)
album = self.add_album_fixture()
item = album.items()[0]
- self.run_command('embedart', '-y', '-f', self.abbey_artpath)
- config['embedart']['compare_threshold'] = 20
- self.run_command('embedart', '-y', '-f', self.abbey_differentpath)
+ self.run_command("embedart", "-y", "-f", self.abbey_artpath)
+ config["embedart"]["compare_threshold"] = 20
+ self.run_command("embedart", "-y", "-f", self.abbey_differentpath)
mediafile = MediaFile(syspath(item.path))
- self.assertEqual(mediafile.images[0].data, self.image_data,
- 'Image written is not {}'.format(
- displayable_path(self.abbey_artpath)))
+ self.assertEqual(
+ mediafile.images[0].data,
+ self.image_data,
+ "Image written is not {}".format(
+ displayable_path(self.abbey_artpath)
+ ),
+ )
@require_artresizer_compare
def test_accept_similar_art(self):
self._setup_data(self.abbey_similarpath)
album = self.add_album_fixture()
item = album.items()[0]
- self.run_command('embedart', '-y', '-f', self.abbey_artpath)
- config['embedart']['compare_threshold'] = 20
- self.run_command('embedart', '-y', '-f', self.abbey_similarpath)
+ self.run_command("embedart", "-y", "-f", self.abbey_artpath)
+ config["embedart"]["compare_threshold"] = 20
+ self.run_command("embedart", "-y", "-f", self.abbey_similarpath)
mediafile = MediaFile(syspath(item.path))
- self.assertEqual(mediafile.images[0].data, self.image_data,
- 'Image written is not {}'.format(
- displayable_path(self.abbey_similarpath)))
+ self.assertEqual(
+ mediafile.images[0].data,
+ self.image_data,
+ "Image written is not {}".format(
+ displayable_path(self.abbey_similarpath)
+ ),
+ )
def test_non_ascii_album_path(self):
- resource_path = os.path.join(_common.RSRC, b'image.mp3')
+ resource_path = os.path.join(_common.RSRC, b"image.mp3")
album = self.add_album_fixture()
trackpath = album.items()[0].path
albumpath = album.path
shutil.copy(syspath(resource_path), syspath(trackpath))
- self.run_command('extractart', '-n', 'extracted')
+ self.run_command("extractart", "-n", "extracted")
- self.assertExists(os.path.join(albumpath, b'extracted.png'))
+ self.assertExists(os.path.join(albumpath, b"extracted.png"))
def test_extracted_extension(self):
- resource_path = os.path.join(_common.RSRC, b'image-jpeg.mp3')
+ resource_path = os.path.join(_common.RSRC, b"image-jpeg.mp3")
album = self.add_album_fixture()
trackpath = album.items()[0].path
albumpath = album.path
shutil.copy(syspath(resource_path), syspath(trackpath))
- self.run_command('extractart', '-n', 'extracted')
+ self.run_command("extractart", "-n", "extracted")
- self.assertExists(os.path.join(albumpath, b'extracted.jpg'))
+ self.assertExists(os.path.join(albumpath, b"extracted.jpg"))
def test_clear_art_with_yes_input(self):
self._setup_data()
album = self.add_album_fixture()
item = album.items()[0]
- self.io.addinput('y')
- self.run_command('embedart', '-f', self.small_artpath)
- self.io.addinput('y')
- self.run_command('clearart')
+ self.io.addinput("y")
+ self.run_command("embedart", "-f", self.small_artpath)
+ self.io.addinput("y")
+ self.run_command("clearart")
mediafile = MediaFile(syspath(item.path))
self.assertFalse(mediafile.images)
@@ -214,10 +221,10 @@ def test_clear_art_with_no_input(self):
self._setup_data()
album = self.add_album_fixture()
item = album.items()[0]
- self.io.addinput('y')
- self.run_command('embedart', '-f', self.small_artpath)
- self.io.addinput('n')
- self.run_command('clearart')
+ self.io.addinput("y")
+ self.run_command("embedart", "-f", self.small_artpath)
+ self.io.addinput("n")
+ self.run_command("clearart")
mediafile = MediaFile(syspath(item.path))
self.assertEqual(mediafile.images[0].data, self.image_data)
@@ -225,34 +232,33 @@ def test_embed_art_from_url_with_yes_input(self):
self._setup_data()
album = self.add_album_fixture()
item = album.items()[0]
- self.mock_response('http://example.com/test.jpg', 'image/jpeg')
- self.io.addinput('y')
- self.run_command('embedart', '-u', 'http://example.com/test.jpg')
+ self.mock_response("http://example.com/test.jpg", "image/jpeg")
+ self.io.addinput("y")
+ self.run_command("embedart", "-u", "http://example.com/test.jpg")
mediafile = MediaFile(syspath(item.path))
self.assertEqual(
mediafile.images[0].data,
- self.IMAGEHEADER.get('image/jpeg').ljust(32, b'\x00')
+ self.IMAGEHEADER.get("image/jpeg").ljust(32, b"\x00"),
)
def test_embed_art_from_url_png(self):
self._setup_data()
album = self.add_album_fixture()
item = album.items()[0]
- self.mock_response('http://example.com/test.png', 'image/png')
- self.run_command('embedart', '-y', '-u', 'http://example.com/test.png')
+ self.mock_response("http://example.com/test.png", "image/png")
+ self.run_command("embedart", "-y", "-u", "http://example.com/test.png")
mediafile = MediaFile(syspath(item.path))
self.assertEqual(
mediafile.images[0].data,
- self.IMAGEHEADER.get('image/png').ljust(32, b'\x00')
+ self.IMAGEHEADER.get("image/png").ljust(32, b"\x00"),
)
def test_embed_art_from_url_not_image(self):
self._setup_data()
album = self.add_album_fixture()
item = album.items()[0]
- self.mock_response('http://example.com/test.html', 'text/html')
- self.run_command('embedart', '-y', '-u',
- 'http://example.com/test.html')
+ self.mock_response("http://example.com/test.html", "text/html")
+ self.run_command("embedart", "-y", "-u", "http://example.com/test.html")
mediafile = MediaFile(syspath(item.path))
self.assertFalse(mediafile.images)
@@ -261,23 +267,24 @@ class DummyArtResizer(ArtResizer):
"""An `ArtResizer` which pretends that ImageMagick is available, and has
a sufficiently recent version to support image comparison.
"""
+
def __init__(self):
self.local_method = DummyIMBackend()
-@patch('beets.util.artresizer.subprocess')
-@patch('beets.art.extract')
+@patch("beets.util.artresizer.subprocess")
+@patch("beets.art.extract")
class ArtSimilarityTest(unittest.TestCase):
def setUp(self):
self.item = _common.item()
- self.log = logging.getLogger('beets.embedart')
+ self.log = logging.getLogger("beets.embedart")
self.artresizer = DummyArtResizer()
def _similarity(self, threshold):
return art.check_art_similarity(
self.log,
self.item,
- b'path',
+ b"path",
threshold,
artresizer=self.artresizer,
)
@@ -288,9 +295,16 @@ def _popen(self, status=0, stdout="", stderr=""):
popen.communicate.return_value = stdout, stderr
return popen
- def _mock_popens(self, mock_extract, mock_subprocess, compare_status=0,
- compare_stdout="", compare_stderr="", convert_status=0):
- mock_extract.return_value = b'extracted_path'
+ def _mock_popens(
+ self,
+ mock_extract,
+ mock_subprocess,
+ compare_status=0,
+ compare_stdout="",
+ compare_stderr="",
+ convert_status=0,
+ ):
+ mock_extract.return_value = b"extracted_path"
mock_subprocess.Popen.side_effect = [
# The `convert` call.
self._popen(convert_status),
@@ -322,8 +336,9 @@ def test_compare_parsing_error(self, mock_extract, mock_subprocess):
self._mock_popens(mock_extract, mock_subprocess, 0, "foo", "bar")
self.assertIsNone(self._similarity(20))
- def test_compare_parsing_error_and_failure(self, mock_extract,
- mock_subprocess):
+ def test_compare_parsing_error_and_failure(
+ self, mock_extract, mock_subprocess
+ ):
self._mock_popens(mock_extract, mock_subprocess, 1, "foo", "bar")
self.assertIsNone(self._similarity(20))
@@ -335,5 +350,6 @@ def test_convert_failure(self, mock_extract, mock_subprocess):
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/plugins/test_embyupdate.py b/test/plugins/test_embyupdate.py
index f45b82de69..55cebb01ed 100644
--- a/test/plugins/test_embyupdate.py
+++ b/test/plugins/test_embyupdate.py
@@ -1,19 +1,21 @@
-from test.helper import TestHelper
-from beetsplug import embyupdate
import unittest
+from test.helper import TestHelper
+
import responses
+from beetsplug import embyupdate
+
class EmbyUpdateTest(unittest.TestCase, TestHelper):
def setUp(self):
self.setup_beets()
- self.load_plugins('embyupdate')
+ self.load_plugins("embyupdate")
- self.config['emby'] = {
- 'host': 'localhost',
- 'port': 8096,
- 'username': 'username',
- 'password': 'password'
+ self.config["emby"] = {
+ "host": "localhost",
+ "port": 8096,
+ "username": "username",
+ "password": "password",
}
def tearDown(self):
@@ -22,134 +24,146 @@ def tearDown(self):
def test_api_url_only_name(self):
self.assertEqual(
- embyupdate.api_url(self.config['emby']['host'].get(),
- self.config['emby']['port'].get(),
- '/Library/Refresh'),
- 'http://localhost:8096/Library/Refresh?format=json'
+ embyupdate.api_url(
+ self.config["emby"]["host"].get(),
+ self.config["emby"]["port"].get(),
+ "/Library/Refresh",
+ ),
+ "http://localhost:8096/Library/Refresh?format=json",
)
def test_api_url_http(self):
self.assertEqual(
- embyupdate.api_url('http://localhost',
- self.config['emby']['port'].get(),
- '/Library/Refresh'),
- 'http://localhost:8096/Library/Refresh?format=json'
+ embyupdate.api_url(
+ "http://localhost",
+ self.config["emby"]["port"].get(),
+ "/Library/Refresh",
+ ),
+ "http://localhost:8096/Library/Refresh?format=json",
)
def test_api_url_https(self):
self.assertEqual(
- embyupdate.api_url('https://localhost',
- self.config['emby']['port'].get(),
- '/Library/Refresh'),
- 'https://localhost:8096/Library/Refresh?format=json'
+ embyupdate.api_url(
+ "https://localhost",
+ self.config["emby"]["port"].get(),
+ "/Library/Refresh",
+ ),
+ "https://localhost:8096/Library/Refresh?format=json",
)
def test_password_data(self):
self.assertEqual(
- embyupdate.password_data(self.config['emby']['username'].get(),
- self.config['emby']['password'].get()),
+ embyupdate.password_data(
+ self.config["emby"]["username"].get(),
+ self.config["emby"]["password"].get(),
+ ),
{
- 'username': 'username',
- 'password': '5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8',
- 'passwordMd5': '5f4dcc3b5aa765d61d8327deb882cf99'
- }
+ "username": "username",
+ "password": "5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8",
+ "passwordMd5": "5f4dcc3b5aa765d61d8327deb882cf99",
+ },
)
def test_create_header_no_token(self):
self.assertEqual(
- embyupdate.create_headers('e8837bc1-ad67-520e-8cd2-f629e3155721'),
+ embyupdate.create_headers("e8837bc1-ad67-520e-8cd2-f629e3155721"),
{
- 'x-emby-authorization': (
- 'MediaBrowser '
+ "x-emby-authorization": (
+ "MediaBrowser "
'UserId="e8837bc1-ad67-520e-8cd2-f629e3155721", '
'Client="other", '
'Device="beets", '
'DeviceId="beets", '
'Version="0.0.0"'
)
- }
+ },
)
def test_create_header_with_token(self):
self.assertEqual(
- embyupdate.create_headers('e8837bc1-ad67-520e-8cd2-f629e3155721',
- token='abc123'),
+ embyupdate.create_headers(
+ "e8837bc1-ad67-520e-8cd2-f629e3155721", token="abc123"
+ ),
{
- 'x-emby-authorization': (
- 'MediaBrowser '
+ "x-emby-authorization": (
+ "MediaBrowser "
'UserId="e8837bc1-ad67-520e-8cd2-f629e3155721", '
'Client="other", '
'Device="beets", '
'DeviceId="beets", '
'Version="0.0.0"'
),
- 'x-mediabrowser-token': 'abc123'
- }
+ "x-mediabrowser-token": "abc123",
+ },
)
@responses.activate
def test_get_token(self):
- body = ('{"User":{"Name":"username", '
- '"ServerId":"1efa5077976bfa92bc71652404f646ec",'
- '"Id":"2ec276a2642e54a19b612b9418a8bd3b","HasPassword":true,'
- '"HasConfiguredPassword":true,'
- '"HasConfiguredEasyPassword":false,'
- '"LastLoginDate":"2015-11-09T08:35:03.6357440Z",'
- '"LastActivityDate":"2015-11-09T08:35:03.6665060Z",'
- '"Configuration":{"AudioLanguagePreference":"",'
- '"PlayDefaultAudioTrack":true,"SubtitleLanguagePreference":"",'
- '"DisplayMissingEpisodes":false,'
- '"DisplayUnairedEpisodes":false,'
- '"GroupMoviesIntoBoxSets":false,'
- '"DisplayChannelsWithinViews":[],'
- '"ExcludeFoldersFromGrouping":[],"GroupedFolders":[],'
- '"SubtitleMode":"Default","DisplayCollectionsView":true,'
- '"DisplayFoldersView":false,"EnableLocalPassword":false,'
- '"OrderedViews":[],"IncludeTrailersInSuggestions":true,'
- '"EnableCinemaMode":true,"LatestItemsExcludes":[],'
- '"PlainFolderViews":[],"HidePlayedInLatest":true,'
- '"DisplayChannelsInline":false},'
- '"Policy":{"IsAdministrator":true,"IsHidden":false,'
- '"IsDisabled":false,"BlockedTags":[],'
- '"EnableUserPreferenceAccess":true,"AccessSchedules":[],'
- '"BlockUnratedItems":[],'
- '"EnableRemoteControlOfOtherUsers":false,'
- '"EnableSharedDeviceControl":true,'
- '"EnableLiveTvManagement":true,"EnableLiveTvAccess":true,'
- '"EnableMediaPlayback":true,'
- '"EnableAudioPlaybackTranscoding":true,'
- '"EnableVideoPlaybackTranscoding":true,'
- '"EnableContentDeletion":false,'
- '"EnableContentDownloading":true,"EnableSync":true,'
- '"EnableSyncTranscoding":true,"EnabledDevices":[],'
- '"EnableAllDevices":true,"EnabledChannels":[],'
- '"EnableAllChannels":true,"EnabledFolders":[],'
- '"EnableAllFolders":true,"InvalidLoginAttemptCount":0,'
- '"EnablePublicSharing":true}},'
- '"SessionInfo":{"SupportedCommands":[],'
- '"QueueableMediaTypes":[],"PlayableMediaTypes":[],'
- '"Id":"89f3b33f8b3a56af22088733ad1d76b3",'
- '"UserId":"2ec276a2642e54a19b612b9418a8bd3b",'
- '"UserName":"username","AdditionalUsers":[],'
- '"ApplicationVersion":"Unknown version",'
- '"Client":"Unknown app",'
- '"LastActivityDate":"2015-11-09T08:35:03.6665060Z",'
- '"DeviceName":"Unknown device","DeviceId":"Unknown device id",'
- '"SupportsRemoteControl":false,"PlayState":{"CanSeek":false,'
- '"IsPaused":false,"IsMuted":false,"RepeatMode":"RepeatNone"}},'
- '"AccessToken":"4b19180cf02748f7b95c7e8e76562fc8",'
- '"ServerId":"1efa5077976bfa92bc71652404f646ec"}')
-
- responses.add(responses.POST,
- ('http://localhost:8096'
- '/Users/AuthenticateByName'),
- body=body,
- status=200,
- content_type='application/json')
+ body = (
+ '{"User":{"Name":"username", '
+ '"ServerId":"1efa5077976bfa92bc71652404f646ec",'
+ '"Id":"2ec276a2642e54a19b612b9418a8bd3b","HasPassword":true,'
+ '"HasConfiguredPassword":true,'
+ '"HasConfiguredEasyPassword":false,'
+ '"LastLoginDate":"2015-11-09T08:35:03.6357440Z",'
+ '"LastActivityDate":"2015-11-09T08:35:03.6665060Z",'
+ '"Configuration":{"AudioLanguagePreference":"",'
+ '"PlayDefaultAudioTrack":true,"SubtitleLanguagePreference":"",'
+ '"DisplayMissingEpisodes":false,'
+ '"DisplayUnairedEpisodes":false,'
+ '"GroupMoviesIntoBoxSets":false,'
+ '"DisplayChannelsWithinViews":[],'
+ '"ExcludeFoldersFromGrouping":[],"GroupedFolders":[],'
+ '"SubtitleMode":"Default","DisplayCollectionsView":true,'
+ '"DisplayFoldersView":false,"EnableLocalPassword":false,'
+ '"OrderedViews":[],"IncludeTrailersInSuggestions":true,'
+ '"EnableCinemaMode":true,"LatestItemsExcludes":[],'
+ '"PlainFolderViews":[],"HidePlayedInLatest":true,'
+ '"DisplayChannelsInline":false},'
+ '"Policy":{"IsAdministrator":true,"IsHidden":false,'
+ '"IsDisabled":false,"BlockedTags":[],'
+ '"EnableUserPreferenceAccess":true,"AccessSchedules":[],'
+ '"BlockUnratedItems":[],'
+ '"EnableRemoteControlOfOtherUsers":false,'
+ '"EnableSharedDeviceControl":true,'
+ '"EnableLiveTvManagement":true,"EnableLiveTvAccess":true,'
+ '"EnableMediaPlayback":true,'
+ '"EnableAudioPlaybackTranscoding":true,'
+ '"EnableVideoPlaybackTranscoding":true,'
+ '"EnableContentDeletion":false,'
+ '"EnableContentDownloading":true,"EnableSync":true,'
+ '"EnableSyncTranscoding":true,"EnabledDevices":[],'
+ '"EnableAllDevices":true,"EnabledChannels":[],'
+ '"EnableAllChannels":true,"EnabledFolders":[],'
+ '"EnableAllFolders":true,"InvalidLoginAttemptCount":0,'
+ '"EnablePublicSharing":true}},'
+ '"SessionInfo":{"SupportedCommands":[],'
+ '"QueueableMediaTypes":[],"PlayableMediaTypes":[],'
+ '"Id":"89f3b33f8b3a56af22088733ad1d76b3",'
+ '"UserId":"2ec276a2642e54a19b612b9418a8bd3b",'
+ '"UserName":"username","AdditionalUsers":[],'
+ '"ApplicationVersion":"Unknown version",'
+ '"Client":"Unknown app",'
+ '"LastActivityDate":"2015-11-09T08:35:03.6665060Z",'
+ '"DeviceName":"Unknown device","DeviceId":"Unknown device id",'
+ '"SupportsRemoteControl":false,"PlayState":{"CanSeek":false,'
+ '"IsPaused":false,"IsMuted":false,"RepeatMode":"RepeatNone"}},'
+ '"AccessToken":"4b19180cf02748f7b95c7e8e76562fc8",'
+ '"ServerId":"1efa5077976bfa92bc71652404f646ec"}'
+ )
+
+ responses.add(
+ responses.POST,
+ ("http://localhost:8096" "/Users/AuthenticateByName"),
+ body=body,
+ status=200,
+ content_type="application/json",
+ )
headers = {
- 'x-emby-authorization': (
- 'MediaBrowser '
+ "x-emby-authorization": (
+ "MediaBrowser "
'UserId="e8837bc1-ad67-520e-8cd2-f629e3155721", '
'Client="other", '
'Device="beets", '
@@ -159,73 +173,76 @@ def test_get_token(self):
}
auth_data = {
- 'username': 'username',
- 'password': '5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8',
- 'passwordMd5': '5f4dcc3b5aa765d61d8327deb882cf99'
+ "username": "username",
+ "password": "5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8",
+ "passwordMd5": "5f4dcc3b5aa765d61d8327deb882cf99",
}
self.assertEqual(
- embyupdate.get_token('http://localhost', 8096, headers, auth_data),
- '4b19180cf02748f7b95c7e8e76562fc8')
+ embyupdate.get_token("http://localhost", 8096, headers, auth_data),
+ "4b19180cf02748f7b95c7e8e76562fc8",
+ )
@responses.activate
def test_get_user(self):
- body = ('[{"Name":"username",'
- '"ServerId":"1efa5077976bfa92bc71652404f646ec",'
- '"Id":"2ec276a2642e54a19b612b9418a8bd3b","HasPassword":true,'
- '"HasConfiguredPassword":true,'
- '"HasConfiguredEasyPassword":false,'
- '"LastLoginDate":"2015-11-09T08:35:03.6357440Z",'
- '"LastActivityDate":"2015-11-09T08:42:39.3693220Z",'
- '"Configuration":{"AudioLanguagePreference":"",'
- '"PlayDefaultAudioTrack":true,"SubtitleLanguagePreference":"",'
- '"DisplayMissingEpisodes":false,'
- '"DisplayUnairedEpisodes":false,'
- '"GroupMoviesIntoBoxSets":false,'
- '"DisplayChannelsWithinViews":[],'
- '"ExcludeFoldersFromGrouping":[],"GroupedFolders":[],'
- '"SubtitleMode":"Default","DisplayCollectionsView":true,'
- '"DisplayFoldersView":false,"EnableLocalPassword":false,'
- '"OrderedViews":[],"IncludeTrailersInSuggestions":true,'
- '"EnableCinemaMode":true,"LatestItemsExcludes":[],'
- '"PlainFolderViews":[],"HidePlayedInLatest":true,'
- '"DisplayChannelsInline":false},'
- '"Policy":{"IsAdministrator":true,"IsHidden":false,'
- '"IsDisabled":false,"BlockedTags":[],'
- '"EnableUserPreferenceAccess":true,"AccessSchedules":[],'
- '"BlockUnratedItems":[],'
- '"EnableRemoteControlOfOtherUsers":false,'
- '"EnableSharedDeviceControl":true,'
- '"EnableLiveTvManagement":true,"EnableLiveTvAccess":true,'
- '"EnableMediaPlayback":true,'
- '"EnableAudioPlaybackTranscoding":true,'
- '"EnableVideoPlaybackTranscoding":true,'
- '"EnableContentDeletion":false,'
- '"EnableContentDownloading":true,'
- '"EnableSync":true,"EnableSyncTranscoding":true,'
- '"EnabledDevices":[],"EnableAllDevices":true,'
- '"EnabledChannels":[],"EnableAllChannels":true,'
- '"EnabledFolders":[],"EnableAllFolders":true,'
- '"InvalidLoginAttemptCount":0,"EnablePublicSharing":true}}]')
-
- responses.add(responses.GET,
- 'http://localhost:8096/Users/Public',
- body=body,
- status=200,
- content_type='application/json')
-
- response = embyupdate.get_user('http://localhost', 8096, 'username')
-
- self.assertEqual(response[0]['Id'],
- '2ec276a2642e54a19b612b9418a8bd3b')
-
- self.assertEqual(response[0]['Name'],
- 'username')
+ body = (
+ '[{"Name":"username",'
+ '"ServerId":"1efa5077976bfa92bc71652404f646ec",'
+ '"Id":"2ec276a2642e54a19b612b9418a8bd3b","HasPassword":true,'
+ '"HasConfiguredPassword":true,'
+ '"HasConfiguredEasyPassword":false,'
+ '"LastLoginDate":"2015-11-09T08:35:03.6357440Z",'
+ '"LastActivityDate":"2015-11-09T08:42:39.3693220Z",'
+ '"Configuration":{"AudioLanguagePreference":"",'
+ '"PlayDefaultAudioTrack":true,"SubtitleLanguagePreference":"",'
+ '"DisplayMissingEpisodes":false,'
+ '"DisplayUnairedEpisodes":false,'
+ '"GroupMoviesIntoBoxSets":false,'
+ '"DisplayChannelsWithinViews":[],'
+ '"ExcludeFoldersFromGrouping":[],"GroupedFolders":[],'
+ '"SubtitleMode":"Default","DisplayCollectionsView":true,'
+ '"DisplayFoldersView":false,"EnableLocalPassword":false,'
+ '"OrderedViews":[],"IncludeTrailersInSuggestions":true,'
+ '"EnableCinemaMode":true,"LatestItemsExcludes":[],'
+ '"PlainFolderViews":[],"HidePlayedInLatest":true,'
+ '"DisplayChannelsInline":false},'
+ '"Policy":{"IsAdministrator":true,"IsHidden":false,'
+ '"IsDisabled":false,"BlockedTags":[],'
+ '"EnableUserPreferenceAccess":true,"AccessSchedules":[],'
+ '"BlockUnratedItems":[],'
+ '"EnableRemoteControlOfOtherUsers":false,'
+ '"EnableSharedDeviceControl":true,'
+ '"EnableLiveTvManagement":true,"EnableLiveTvAccess":true,'
+ '"EnableMediaPlayback":true,'
+ '"EnableAudioPlaybackTranscoding":true,'
+ '"EnableVideoPlaybackTranscoding":true,'
+ '"EnableContentDeletion":false,'
+ '"EnableContentDownloading":true,'
+ '"EnableSync":true,"EnableSyncTranscoding":true,'
+ '"EnabledDevices":[],"EnableAllDevices":true,'
+ '"EnabledChannels":[],"EnableAllChannels":true,'
+ '"EnabledFolders":[],"EnableAllFolders":true,'
+ '"InvalidLoginAttemptCount":0,"EnablePublicSharing":true}}]'
+ )
+
+ responses.add(
+ responses.GET,
+ "http://localhost:8096/Users/Public",
+ body=body,
+ status=200,
+ content_type="application/json",
+ )
+
+ response = embyupdate.get_user("http://localhost", 8096, "username")
+
+ self.assertEqual(response[0]["Id"], "2ec276a2642e54a19b612b9418a8bd3b")
+
+ self.assertEqual(response[0]["Name"], "username")
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/plugins/test_export.py b/test/plugins/test_export.py
index 27ad869988..1c3e5e49e8 100644
--- a/test/plugins/test_export.py
+++ b/test/plugins/test_export.py
@@ -16,49 +16,43 @@
"""
+import json
+import re # used to test csv format
import unittest
from test.helper import TestHelper
-import re # used to test csv format
-import json
-from xml.etree.ElementTree import Element
from xml.etree import ElementTree
+from xml.etree.ElementTree import Element
class ExportPluginTest(unittest.TestCase, TestHelper):
def setUp(self):
self.setup_beets()
- self.load_plugins('export')
- self.test_values = {'title': 'xtitle', 'album': 'xalbum'}
+ self.load_plugins("export")
+ self.test_values = {"title": "xtitle", "album": "xalbum"}
def tearDown(self):
self.unload_plugins()
self.teardown_beets()
def execute_command(self, format_type, artist):
- query = ','.join(self.test_values.keys())
+ query = ",".join(self.test_values.keys())
out = self.run_with_output(
- 'export',
- '-f', format_type,
- '-i', query,
- artist
+ "export", "-f", format_type, "-i", query, artist
)
return out
def create_item(self):
- item, = self.add_item_fixtures()
- item.artist = 'xartist'
- item.title = self.test_values['title']
- item.album = self.test_values['album']
+ (item,) = self.add_item_fixtures()
+ item.artist = "xartist"
+ item.title = self.test_values["title"]
+ item.album = self.test_values["album"]
item.write()
item.store()
return item
def test_json_output(self):
item1 = self.create_item()
- out = self.execute_command(
- format_type='json',
- artist=item1.artist
- )
+ out = self.execute_command(format_type="json", artist=item1.artist)
json_data = json.loads(out)[0]
for key, val in self.test_values.items():
self.assertTrue(key in json_data)
@@ -66,10 +60,7 @@ def test_json_output(self):
def test_jsonlines_output(self):
item1 = self.create_item()
- out = self.execute_command(
- format_type='jsonlines',
- artist=item1.artist
- )
+ out = self.execute_command(format_type="jsonlines", artist=item1.artist)
json_data = json.loads(out)
for key, val in self.test_values.items():
self.assertTrue(key in json_data)
@@ -77,23 +68,17 @@ def test_jsonlines_output(self):
def test_csv_output(self):
item1 = self.create_item()
- out = self.execute_command(
- format_type='csv',
- artist=item1.artist
- )
- csv_list = re.split('\r', re.sub('\n', '', out))
- head = re.split(',', csv_list[0])
- vals = re.split(',|\r', csv_list[1])
+ out = self.execute_command(format_type="csv", artist=item1.artist)
+ csv_list = re.split("\r", re.sub("\n", "", out))
+ head = re.split(",", csv_list[0])
+ vals = re.split(",|\r", csv_list[1])
for index, column in enumerate(head):
self.assertTrue(self.test_values.get(column, None) is not None)
self.assertEqual(vals[index], self.test_values[column])
def test_xml_output(self):
item1 = self.create_item()
- out = self.execute_command(
- format_type='xml',
- artist=item1.artist
- )
+ out = self.execute_command(format_type="xml", artist=item1.artist)
library = ElementTree.fromstring(out)
self.assertIsInstance(library, Element)
for track in library[0]:
@@ -107,5 +92,6 @@ def test_xml_output(self):
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/plugins/test_fetchart.py b/test/plugins/test_fetchart.py
index 25d7b6ebac..90a6570a2d 100644
--- a/test/plugins/test_fetchart.py
+++ b/test/plugins/test_fetchart.py
@@ -18,87 +18,88 @@
import sys
import unittest
from test.helper import TestHelper
+
from beets import util
class FetchartCliTest(unittest.TestCase, TestHelper):
-
def setUp(self):
self.setup_beets()
- self.load_plugins('fetchart')
- self.config['fetchart']['cover_names'] = 'c\xc3\xb6ver.jpg'
- self.config['art_filename'] = 'mycover'
+ self.load_plugins("fetchart")
+ self.config["fetchart"]["cover_names"] = "c\xc3\xb6ver.jpg"
+ self.config["art_filename"] = "mycover"
self.album = self.add_album()
- self.cover_path = os.path.join(self.album.path, b'mycover.jpg')
+ self.cover_path = os.path.join(self.album.path, b"mycover.jpg")
def tearDown(self):
self.unload_plugins()
self.teardown_beets()
def check_cover_is_stored(self):
- self.assertEqual(self.album['artpath'], self.cover_path)
+ self.assertEqual(self.album["artpath"], self.cover_path)
with open(util.syspath(self.cover_path)) as f:
- self.assertEqual(f.read(), 'IMAGE')
+ self.assertEqual(f.read(), "IMAGE")
def hide_file_windows(self):
hidden_mask = 2
- success = ctypes.windll.kernel32.SetFileAttributesW(self.cover_path,
- hidden_mask)
+ success = ctypes.windll.kernel32.SetFileAttributesW(
+ self.cover_path, hidden_mask
+ )
if not success:
self.skipTest("unable to set file attributes")
def test_set_art_from_folder(self):
- self.touch(b'c\xc3\xb6ver.jpg', dir=self.album.path, content='IMAGE')
+ self.touch(b"c\xc3\xb6ver.jpg", dir=self.album.path, content="IMAGE")
- self.run_command('fetchart')
+ self.run_command("fetchart")
self.album.load()
self.check_cover_is_stored()
def test_filesystem_does_not_pick_up_folder(self):
- os.makedirs(os.path.join(self.album.path, b'mycover.jpg'))
- self.run_command('fetchart')
+ os.makedirs(os.path.join(self.album.path, b"mycover.jpg"))
+ self.run_command("fetchart")
self.album.load()
- self.assertEqual(self.album['artpath'], None)
+ self.assertEqual(self.album["artpath"], None)
def test_filesystem_does_not_pick_up_ignored_file(self):
- self.touch(b'co_ver.jpg', dir=self.album.path, content='IMAGE')
- self.config['ignore'] = ['*_*']
- self.run_command('fetchart')
+ self.touch(b"co_ver.jpg", dir=self.album.path, content="IMAGE")
+ self.config["ignore"] = ["*_*"]
+ self.run_command("fetchart")
self.album.load()
- self.assertEqual(self.album['artpath'], None)
+ self.assertEqual(self.album["artpath"], None)
def test_filesystem_picks_up_non_ignored_file(self):
- self.touch(b'cover.jpg', dir=self.album.path, content='IMAGE')
- self.config['ignore'] = ['*_*']
- self.run_command('fetchart')
+ self.touch(b"cover.jpg", dir=self.album.path, content="IMAGE")
+ self.config["ignore"] = ["*_*"]
+ self.run_command("fetchart")
self.album.load()
self.check_cover_is_stored()
def test_filesystem_does_not_pick_up_hidden_file(self):
- self.touch(b'.cover.jpg', dir=self.album.path, content='IMAGE')
- if sys.platform == 'win32':
+ self.touch(b".cover.jpg", dir=self.album.path, content="IMAGE")
+ if sys.platform == "win32":
self.hide_file_windows()
- self.config['ignore'] = [] # By default, ignore includes '.*'.
- self.config['ignore_hidden'] = True
- self.run_command('fetchart')
+ self.config["ignore"] = [] # By default, ignore includes '.*'.
+ self.config["ignore_hidden"] = True
+ self.run_command("fetchart")
self.album.load()
- self.assertEqual(self.album['artpath'], None)
+ self.assertEqual(self.album["artpath"], None)
def test_filesystem_picks_up_non_hidden_file(self):
- self.touch(b'cover.jpg', dir=self.album.path, content='IMAGE')
- self.config['ignore_hidden'] = True
- self.run_command('fetchart')
+ self.touch(b"cover.jpg", dir=self.album.path, content="IMAGE")
+ self.config["ignore_hidden"] = True
+ self.run_command("fetchart")
self.album.load()
self.check_cover_is_stored()
def test_filesystem_picks_up_hidden_file(self):
- self.touch(b'.cover.jpg', dir=self.album.path, content='IMAGE')
- if sys.platform == 'win32':
+ self.touch(b".cover.jpg", dir=self.album.path, content="IMAGE")
+ if sys.platform == "win32":
self.hide_file_windows()
- self.config['ignore'] = [] # By default, ignore includes '.*'.
- self.config['ignore_hidden'] = False
- self.run_command('fetchart')
+ self.config["ignore"] = [] # By default, ignore includes '.*'.
+ self.config["ignore_hidden"] = False
+ self.run_command("fetchart")
self.album.load()
self.check_cover_is_stored()
@@ -106,5 +107,6 @@ def test_filesystem_picks_up_hidden_file(self):
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/plugins/test_filefilter.py b/test/plugins/test_filefilter.py
index 2eaaa96da2..e5c0f615a8 100644
--- a/test/plugins/test_filefilter.py
+++ b/test/plugins/test_filefilter.py
@@ -19,13 +19,14 @@
import os
import shutil
import unittest
-
from test import _common
from test.helper import capture_log
from test.test_importer import ImportHelper
-from beets import config
+
from mediafile import MediaFile
-from beets.util import displayable_path, bytestring_path, syspath
+
+from beets import config
+from beets.util import bytestring_path, displayable_path, syspath
from beetsplug.filefilter import FileFilterPlugin
@@ -34,14 +35,14 @@ def setUp(self):
self.setup_beets()
self.__create_import_dir(2)
self._setup_import_session()
- config['import']['pretend'] = True
+ config["import"]["pretend"] = True
def tearDown(self):
self.teardown_beets()
def __copy_file(self, dest_path, metadata):
# Copy files
- resource_path = os.path.join(_common.RSRC, b'full.mp3')
+ resource_path = os.path.join(_common.RSRC, b"full.mp3")
shutil.copy(syspath(resource_path), syspath(dest_path))
medium = MediaFile(dest_path)
# Set metadata
@@ -50,55 +51,55 @@ def __copy_file(self, dest_path, metadata):
medium.save()
def __create_import_dir(self, count):
- self.import_dir = os.path.join(self.temp_dir, b'testsrcdir')
+ self.import_dir = os.path.join(self.temp_dir, b"testsrcdir")
if os.path.isdir(syspath(self.import_dir)):
shutil.rmtree(syspath(self.import_dir))
- self.artist_path = os.path.join(self.import_dir, b'artist')
- self.album_path = os.path.join(self.artist_path, b'album')
- self.misc_path = os.path.join(self.import_dir, b'misc')
+ self.artist_path = os.path.join(self.import_dir, b"artist")
+ self.album_path = os.path.join(self.artist_path, b"album")
+ self.misc_path = os.path.join(self.import_dir, b"misc")
os.makedirs(syspath(self.album_path))
os.makedirs(syspath(self.misc_path))
metadata = {
- 'artist': 'Tag Artist',
- 'album': 'Tag Album',
- 'albumartist': None,
- 'mb_trackid': None,
- 'mb_albumid': None,
- 'comp': None,
+ "artist": "Tag Artist",
+ "album": "Tag Album",
+ "albumartist": None,
+ "mb_trackid": None,
+ "mb_albumid": None,
+ "comp": None,
}
self.album_paths = []
for i in range(count):
- metadata['track'] = i + 1
- metadata['title'] = 'Tag Title Album %d' % (i + 1)
- track_file = bytestring_path('%02d - track.mp3' % (i + 1))
+ metadata["track"] = i + 1
+ metadata["title"] = "Tag Title Album %d" % (i + 1)
+ track_file = bytestring_path("%02d - track.mp3" % (i + 1))
dest_path = os.path.join(self.album_path, track_file)
self.__copy_file(dest_path, metadata)
self.album_paths.append(dest_path)
self.artist_paths = []
- metadata['album'] = None
+ metadata["album"] = None
for i in range(count):
- metadata['track'] = i + 10
- metadata['title'] = 'Tag Title Artist %d' % (i + 1)
- track_file = bytestring_path('track_%d.mp3' % (i + 1))
+ metadata["track"] = i + 10
+ metadata["title"] = "Tag Title Artist %d" % (i + 1)
+ track_file = bytestring_path("track_%d.mp3" % (i + 1))
dest_path = os.path.join(self.artist_path, track_file)
self.__copy_file(dest_path, metadata)
self.artist_paths.append(dest_path)
self.misc_paths = []
for i in range(count):
- metadata['artist'] = 'Artist %d' % (i + 42)
- metadata['track'] = i + 5
- metadata['title'] = 'Tag Title Misc %d' % (i + 1)
- track_file = bytestring_path('track_%d.mp3' % (i + 1))
+ metadata["artist"] = "Artist %d" % (i + 42)
+ metadata["track"] = i + 5
+ metadata["title"] = "Tag Title Misc %d" % (i + 1)
+ track_file = bytestring_path("track_%d.mp3" % (i + 1))
dest_path = os.path.join(self.misc_path, track_file)
self.__copy_file(dest_path, metadata)
self.misc_paths.append(dest_path)
def __run(self, expected_lines, singletons=False):
- self.load_plugins('filefilter')
+ self.load_plugins("filefilter")
import_files = [self.import_dir]
self._setup_import_session(singletons=singletons)
@@ -109,99 +110,122 @@ def __run(self, expected_lines, singletons=False):
self.unload_plugins()
FileFilterPlugin.listeners = None
- logs = [line for line in logs if not line.startswith('Sending event:')]
+ logs = [line for line in logs if not line.startswith("Sending event:")]
self.assertEqual(logs, expected_lines)
def test_import_default(self):
- """ The default configuration should import everything.
- """
- self.__run([
- 'Album: %s' % displayable_path(self.artist_path),
- ' %s' % displayable_path(self.artist_paths[0]),
- ' %s' % displayable_path(self.artist_paths[1]),
- 'Album: %s' % displayable_path(self.album_path),
- ' %s' % displayable_path(self.album_paths[0]),
- ' %s' % displayable_path(self.album_paths[1]),
- 'Album: %s' % displayable_path(self.misc_path),
- ' %s' % displayable_path(self.misc_paths[0]),
- ' %s' % displayable_path(self.misc_paths[1])
- ])
+ """The default configuration should import everything."""
+ self.__run(
+ [
+ "Album: %s" % displayable_path(self.artist_path),
+ " %s" % displayable_path(self.artist_paths[0]),
+ " %s" % displayable_path(self.artist_paths[1]),
+ "Album: %s" % displayable_path(self.album_path),
+ " %s" % displayable_path(self.album_paths[0]),
+ " %s" % displayable_path(self.album_paths[1]),
+ "Album: %s" % displayable_path(self.misc_path),
+ " %s" % displayable_path(self.misc_paths[0]),
+ " %s" % displayable_path(self.misc_paths[1]),
+ ]
+ )
def test_import_nothing(self):
- config['filefilter']['path'] = 'not_there'
- self.__run(['No files imported from %s' % displayable_path(
- self.import_dir)])
+ config["filefilter"]["path"] = "not_there"
+ self.__run(
+ ["No files imported from %s" % displayable_path(self.import_dir)]
+ )
# Global options
def test_import_global(self):
- config['filefilter']['path'] = '.*track_1.*\\.mp3'
- self.__run([
- 'Album: %s' % displayable_path(self.artist_path),
- ' %s' % displayable_path(self.artist_paths[0]),
- 'Album: %s' % displayable_path(self.misc_path),
- ' %s' % displayable_path(self.misc_paths[0]),
- ])
- self.__run([
- 'Singleton: %s' % displayable_path(self.artist_paths[0]),
- 'Singleton: %s' % displayable_path(self.misc_paths[0])
- ], singletons=True)
+ config["filefilter"]["path"] = ".*track_1.*\\.mp3"
+ self.__run(
+ [
+ "Album: %s" % displayable_path(self.artist_path),
+ " %s" % displayable_path(self.artist_paths[0]),
+ "Album: %s" % displayable_path(self.misc_path),
+ " %s" % displayable_path(self.misc_paths[0]),
+ ]
+ )
+ self.__run(
+ [
+ "Singleton: %s" % displayable_path(self.artist_paths[0]),
+ "Singleton: %s" % displayable_path(self.misc_paths[0]),
+ ],
+ singletons=True,
+ )
# Album options
def test_import_album(self):
- config['filefilter']['album_path'] = '.*track_1.*\\.mp3'
- self.__run([
- 'Album: %s' % displayable_path(self.artist_path),
- ' %s' % displayable_path(self.artist_paths[0]),
- 'Album: %s' % displayable_path(self.misc_path),
- ' %s' % displayable_path(self.misc_paths[0]),
- ])
- self.__run([
- 'Singleton: %s' % displayable_path(self.artist_paths[0]),
- 'Singleton: %s' % displayable_path(self.artist_paths[1]),
- 'Singleton: %s' % displayable_path(self.album_paths[0]),
- 'Singleton: %s' % displayable_path(self.album_paths[1]),
- 'Singleton: %s' % displayable_path(self.misc_paths[0]),
- 'Singleton: %s' % displayable_path(self.misc_paths[1])
- ], singletons=True)
+ config["filefilter"]["album_path"] = ".*track_1.*\\.mp3"
+ self.__run(
+ [
+ "Album: %s" % displayable_path(self.artist_path),
+ " %s" % displayable_path(self.artist_paths[0]),
+ "Album: %s" % displayable_path(self.misc_path),
+ " %s" % displayable_path(self.misc_paths[0]),
+ ]
+ )
+ self.__run(
+ [
+ "Singleton: %s" % displayable_path(self.artist_paths[0]),
+ "Singleton: %s" % displayable_path(self.artist_paths[1]),
+ "Singleton: %s" % displayable_path(self.album_paths[0]),
+ "Singleton: %s" % displayable_path(self.album_paths[1]),
+ "Singleton: %s" % displayable_path(self.misc_paths[0]),
+ "Singleton: %s" % displayable_path(self.misc_paths[1]),
+ ],
+ singletons=True,
+ )
# Singleton options
def test_import_singleton(self):
- config['filefilter']['singleton_path'] = '.*track_1.*\\.mp3'
- self.__run([
- 'Singleton: %s' % displayable_path(self.artist_paths[0]),
- 'Singleton: %s' % displayable_path(self.misc_paths[0])
- ], singletons=True)
- self.__run([
- 'Album: %s' % displayable_path(self.artist_path),
- ' %s' % displayable_path(self.artist_paths[0]),
- ' %s' % displayable_path(self.artist_paths[1]),
- 'Album: %s' % displayable_path(self.album_path),
- ' %s' % displayable_path(self.album_paths[0]),
- ' %s' % displayable_path(self.album_paths[1]),
- 'Album: %s' % displayable_path(self.misc_path),
- ' %s' % displayable_path(self.misc_paths[0]),
- ' %s' % displayable_path(self.misc_paths[1])
- ])
+ config["filefilter"]["singleton_path"] = ".*track_1.*\\.mp3"
+ self.__run(
+ [
+ "Singleton: %s" % displayable_path(self.artist_paths[0]),
+ "Singleton: %s" % displayable_path(self.misc_paths[0]),
+ ],
+ singletons=True,
+ )
+ self.__run(
+ [
+ "Album: %s" % displayable_path(self.artist_path),
+ " %s" % displayable_path(self.artist_paths[0]),
+ " %s" % displayable_path(self.artist_paths[1]),
+ "Album: %s" % displayable_path(self.album_path),
+ " %s" % displayable_path(self.album_paths[0]),
+ " %s" % displayable_path(self.album_paths[1]),
+ "Album: %s" % displayable_path(self.misc_path),
+ " %s" % displayable_path(self.misc_paths[0]),
+ " %s" % displayable_path(self.misc_paths[1]),
+ ]
+ )
# Album and singleton options
def test_import_both(self):
- config['filefilter']['album_path'] = '.*track_1.*\\.mp3'
- config['filefilter']['singleton_path'] = '.*track_2.*\\.mp3'
- self.__run([
- 'Album: %s' % displayable_path(self.artist_path),
- ' %s' % displayable_path(self.artist_paths[0]),
- 'Album: %s' % displayable_path(self.misc_path),
- ' %s' % displayable_path(self.misc_paths[0]),
- ])
- self.__run([
- 'Singleton: %s' % displayable_path(self.artist_paths[1]),
- 'Singleton: %s' % displayable_path(self.misc_paths[1])
- ], singletons=True)
+ config["filefilter"]["album_path"] = ".*track_1.*\\.mp3"
+ config["filefilter"]["singleton_path"] = ".*track_2.*\\.mp3"
+ self.__run(
+ [
+ "Album: %s" % displayable_path(self.artist_path),
+ " %s" % displayable_path(self.artist_paths[0]),
+ "Album: %s" % displayable_path(self.misc_path),
+ " %s" % displayable_path(self.misc_paths[0]),
+ ]
+ )
+ self.__run(
+ [
+ "Singleton: %s" % displayable_path(self.artist_paths[1]),
+ "Singleton: %s" % displayable_path(self.misc_paths[1]),
+ ],
+ singletons=True,
+ )
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/plugins/test_ftintitle.py b/test/plugins/test_ftintitle.py
index 692f2ef571..10ffd4f729 100644
--- a/test/plugins/test_ftintitle.py
+++ b/test/plugins/test_ftintitle.py
@@ -17,6 +17,7 @@
import unittest
from test.helper import TestHelper
+
from beetsplug import ftintitle
@@ -24,60 +25,62 @@ class FtInTitlePluginFunctional(unittest.TestCase, TestHelper):
def setUp(self):
"""Set up configuration"""
self.setup_beets()
- self.load_plugins('ftintitle')
+ self.load_plugins("ftintitle")
def tearDown(self):
self.unload_plugins()
self.teardown_beets()
def _ft_add_item(self, path, artist, title, aartist):
- return self.add_item(path=path,
- artist=artist,
- artist_sort=artist,
- title=title,
- albumartist=aartist)
+ return self.add_item(
+ path=path,
+ artist=artist,
+ artist_sort=artist,
+ title=title,
+ albumartist=aartist,
+ )
def _ft_set_config(self, ftformat, drop=False, auto=True):
- self.config['ftintitle']['format'] = ftformat
- self.config['ftintitle']['drop'] = drop
- self.config['ftintitle']['auto'] = auto
+ self.config["ftintitle"]["format"] = ftformat
+ self.config["ftintitle"]["drop"] = drop
+ self.config["ftintitle"]["auto"] = auto
def test_functional_drop(self):
- item = self._ft_add_item('/', 'Alice ft Bob', 'Song 1', 'Alice')
- self.run_command('ftintitle', '-d')
+ item = self._ft_add_item("/", "Alice ft Bob", "Song 1", "Alice")
+ self.run_command("ftintitle", "-d")
item.load()
- self.assertEqual(item['artist'], 'Alice')
- self.assertEqual(item['title'], 'Song 1')
+ self.assertEqual(item["artist"], "Alice")
+ self.assertEqual(item["title"], "Song 1")
def test_functional_not_found(self):
- item = self._ft_add_item('/', 'Alice ft Bob', 'Song 1', 'George')
- self.run_command('ftintitle', '-d')
+ item = self._ft_add_item("/", "Alice ft Bob", "Song 1", "George")
+ self.run_command("ftintitle", "-d")
item.load()
# item should be unchanged
- self.assertEqual(item['artist'], 'Alice ft Bob')
- self.assertEqual(item['title'], 'Song 1')
+ self.assertEqual(item["artist"], "Alice ft Bob")
+ self.assertEqual(item["title"], "Song 1")
def test_functional_custom_format(self):
- self._ft_set_config('feat. {0}')
- item = self._ft_add_item('/', 'Alice ft Bob', 'Song 1', 'Alice')
- self.run_command('ftintitle')
+ self._ft_set_config("feat. {0}")
+ item = self._ft_add_item("/", "Alice ft Bob", "Song 1", "Alice")
+ self.run_command("ftintitle")
item.load()
- self.assertEqual(item['artist'], 'Alice')
- self.assertEqual(item['title'], 'Song 1 feat. Bob')
+ self.assertEqual(item["artist"], "Alice")
+ self.assertEqual(item["title"], "Song 1 feat. Bob")
- self._ft_set_config('featuring {0}')
- item = self._ft_add_item('/', 'Alice feat. Bob', 'Song 1', 'Alice')
- self.run_command('ftintitle')
+ self._ft_set_config("featuring {0}")
+ item = self._ft_add_item("/", "Alice feat. Bob", "Song 1", "Alice")
+ self.run_command("ftintitle")
item.load()
- self.assertEqual(item['artist'], 'Alice')
- self.assertEqual(item['title'], 'Song 1 featuring Bob')
+ self.assertEqual(item["artist"], "Alice")
+ self.assertEqual(item["title"], "Song 1 featuring Bob")
- self._ft_set_config('with {0}')
- item = self._ft_add_item('/', 'Alice feat Bob', 'Song 1', 'Alice')
- self.run_command('ftintitle')
+ self._ft_set_config("with {0}")
+ item = self._ft_add_item("/", "Alice feat Bob", "Song 1", "Alice")
+ self.run_command("ftintitle")
item.load()
- self.assertEqual(item['artist'], 'Alice')
- self.assertEqual(item['title'], 'Song 1 with Bob')
+ self.assertEqual(item["artist"], "Alice")
+ self.assertEqual(item["title"], "Song 1 with Bob")
class FtInTitlePluginTest(unittest.TestCase):
@@ -88,96 +91,96 @@ def setUp(self):
def test_find_feat_part(self):
test_cases = [
{
- 'artist': 'Alice ft. Bob',
- 'album_artist': 'Alice',
- 'feat_part': 'Bob'
+ "artist": "Alice ft. Bob",
+ "album_artist": "Alice",
+ "feat_part": "Bob",
},
{
- 'artist': 'Alice feat Bob',
- 'album_artist': 'Alice',
- 'feat_part': 'Bob'
+ "artist": "Alice feat Bob",
+ "album_artist": "Alice",
+ "feat_part": "Bob",
},
{
- 'artist': 'Alice featuring Bob',
- 'album_artist': 'Alice',
- 'feat_part': 'Bob'
+ "artist": "Alice featuring Bob",
+ "album_artist": "Alice",
+ "feat_part": "Bob",
},
{
- 'artist': 'Alice & Bob',
- 'album_artist': 'Alice',
- 'feat_part': 'Bob'
+ "artist": "Alice & Bob",
+ "album_artist": "Alice",
+ "feat_part": "Bob",
},
{
- 'artist': 'Alice and Bob',
- 'album_artist': 'Alice',
- 'feat_part': 'Bob'
+ "artist": "Alice and Bob",
+ "album_artist": "Alice",
+ "feat_part": "Bob",
},
{
- 'artist': 'Alice With Bob',
- 'album_artist': 'Alice',
- 'feat_part': 'Bob'
+ "artist": "Alice With Bob",
+ "album_artist": "Alice",
+ "feat_part": "Bob",
},
{
- 'artist': 'Alice defeat Bob',
- 'album_artist': 'Alice',
- 'feat_part': None
+ "artist": "Alice defeat Bob",
+ "album_artist": "Alice",
+ "feat_part": None,
},
{
- 'artist': 'Alice & Bob',
- 'album_artist': 'Bob',
- 'feat_part': 'Alice'
+ "artist": "Alice & Bob",
+ "album_artist": "Bob",
+ "feat_part": "Alice",
},
{
- 'artist': 'Alice ft. Bob',
- 'album_artist': 'Bob',
- 'feat_part': 'Alice'
+ "artist": "Alice ft. Bob",
+ "album_artist": "Bob",
+ "feat_part": "Alice",
},
{
- 'artist': 'Alice ft. Carol',
- 'album_artist': 'Bob',
- 'feat_part': None
+ "artist": "Alice ft. Carol",
+ "album_artist": "Bob",
+ "feat_part": None,
},
]
for test_case in test_cases:
feat_part = ftintitle.find_feat_part(
- test_case['artist'],
- test_case['album_artist']
+ test_case["artist"], test_case["album_artist"]
)
- self.assertEqual(feat_part, test_case['feat_part'])
+ self.assertEqual(feat_part, test_case["feat_part"])
def test_split_on_feat(self):
- parts = ftintitle.split_on_feat('Alice ft. Bob')
- self.assertEqual(parts, ('Alice', 'Bob'))
- parts = ftintitle.split_on_feat('Alice feat Bob')
- self.assertEqual(parts, ('Alice', 'Bob'))
- parts = ftintitle.split_on_feat('Alice feat. Bob')
- self.assertEqual(parts, ('Alice', 'Bob'))
- parts = ftintitle.split_on_feat('Alice featuring Bob')
- self.assertEqual(parts, ('Alice', 'Bob'))
- parts = ftintitle.split_on_feat('Alice & Bob')
- self.assertEqual(parts, ('Alice', 'Bob'))
- parts = ftintitle.split_on_feat('Alice and Bob')
- self.assertEqual(parts, ('Alice', 'Bob'))
- parts = ftintitle.split_on_feat('Alice With Bob')
- self.assertEqual(parts, ('Alice', 'Bob'))
- parts = ftintitle.split_on_feat('Alice defeat Bob')
- self.assertEqual(parts, ('Alice defeat Bob', None))
+ parts = ftintitle.split_on_feat("Alice ft. Bob")
+ self.assertEqual(parts, ("Alice", "Bob"))
+ parts = ftintitle.split_on_feat("Alice feat Bob")
+ self.assertEqual(parts, ("Alice", "Bob"))
+ parts = ftintitle.split_on_feat("Alice feat. Bob")
+ self.assertEqual(parts, ("Alice", "Bob"))
+ parts = ftintitle.split_on_feat("Alice featuring Bob")
+ self.assertEqual(parts, ("Alice", "Bob"))
+ parts = ftintitle.split_on_feat("Alice & Bob")
+ self.assertEqual(parts, ("Alice", "Bob"))
+ parts = ftintitle.split_on_feat("Alice and Bob")
+ self.assertEqual(parts, ("Alice", "Bob"))
+ parts = ftintitle.split_on_feat("Alice With Bob")
+ self.assertEqual(parts, ("Alice", "Bob"))
+ parts = ftintitle.split_on_feat("Alice defeat Bob")
+ self.assertEqual(parts, ("Alice defeat Bob", None))
def test_contains_feat(self):
- self.assertTrue(ftintitle.contains_feat('Alice ft. Bob'))
- self.assertTrue(ftintitle.contains_feat('Alice feat. Bob'))
- self.assertTrue(ftintitle.contains_feat('Alice feat Bob'))
- self.assertTrue(ftintitle.contains_feat('Alice featuring Bob'))
- self.assertTrue(ftintitle.contains_feat('Alice & Bob'))
- self.assertTrue(ftintitle.contains_feat('Alice and Bob'))
- self.assertTrue(ftintitle.contains_feat('Alice With Bob'))
- self.assertFalse(ftintitle.contains_feat('Alice defeat Bob'))
- self.assertFalse(ftintitle.contains_feat('Aliceft.Bob'))
+ self.assertTrue(ftintitle.contains_feat("Alice ft. Bob"))
+ self.assertTrue(ftintitle.contains_feat("Alice feat. Bob"))
+ self.assertTrue(ftintitle.contains_feat("Alice feat Bob"))
+ self.assertTrue(ftintitle.contains_feat("Alice featuring Bob"))
+ self.assertTrue(ftintitle.contains_feat("Alice & Bob"))
+ self.assertTrue(ftintitle.contains_feat("Alice and Bob"))
+ self.assertTrue(ftintitle.contains_feat("Alice With Bob"))
+ self.assertFalse(ftintitle.contains_feat("Alice defeat Bob"))
+ self.assertFalse(ftintitle.contains_feat("Aliceft.Bob"))
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/plugins/test_hook.py b/test/plugins/test_hook.py
index 5049b5d24b..d702b9894e 100644
--- a/test/plugins/test_hook.py
+++ b/test/plugins/test_hook.py
@@ -17,12 +17,10 @@
import sys
import tempfile
import unittest
-
from test import _common
from test.helper import TestHelper, capture_log
-from beets import config
-from beets import plugins
+from beets import config, plugins
def get_temporary_path():
@@ -43,82 +41,81 @@ def tearDown(self):
self.teardown_beets()
def _add_hook(self, event, command):
- hook = {
- 'event': event,
- 'command': command
- }
+ hook = {"event": event, "command": command}
- hooks = config['hook']['hooks'].get(list) if 'hook' in config else []
+ hooks = config["hook"]["hooks"].get(list) if "hook" in config else []
hooks.append(hook)
- config['hook']['hooks'] = hooks
+ config["hook"]["hooks"] = hooks
def test_hook_empty_command(self):
- self._add_hook('test_event', '')
+ self._add_hook("test_event", "")
- self.load_plugins('hook')
+ self.load_plugins("hook")
- with capture_log('beets.hook') as logs:
- plugins.send('test_event')
+ with capture_log("beets.hook") as logs:
+ plugins.send("test_event")
self.assertIn('hook: invalid command ""', logs)
# FIXME: fails on windows
- @unittest.skipIf(sys.platform == 'win32', 'win32')
+ @unittest.skipIf(sys.platform == "win32", "win32")
def test_hook_non_zero_exit(self):
- self._add_hook('test_event', 'sh -c "exit 1"')
+ self._add_hook("test_event", 'sh -c "exit 1"')
- self.load_plugins('hook')
+ self.load_plugins("hook")
- with capture_log('beets.hook') as logs:
- plugins.send('test_event')
+ with capture_log("beets.hook") as logs:
+ plugins.send("test_event")
- self.assertIn('hook: hook for test_event exited with status 1', logs)
+ self.assertIn("hook: hook for test_event exited with status 1", logs)
def test_hook_non_existent_command(self):
- self._add_hook('test_event', 'non-existent-command')
+ self._add_hook("test_event", "non-existent-command")
- self.load_plugins('hook')
+ self.load_plugins("hook")
- with capture_log('beets.hook') as logs:
- plugins.send('test_event')
+ with capture_log("beets.hook") as logs:
+ plugins.send("test_event")
- self.assertTrue(any(
- message.startswith("hook: hook for test_event failed: ")
- for message in logs))
+ self.assertTrue(
+ any(
+ message.startswith("hook: hook for test_event failed: ")
+ for message in logs
+ )
+ )
# FIXME: fails on windows
- @unittest.skipIf(sys.platform == 'win32', 'win32')
+ @unittest.skipIf(sys.platform == "win32", "win32")
def test_hook_no_arguments(self):
temporary_paths = [
get_temporary_path() for i in range(self.TEST_HOOK_COUNT)
]
for index, path in enumerate(temporary_paths):
- self._add_hook(f'test_no_argument_event_{index}',
- f'touch "{path}"')
+ self._add_hook(f"test_no_argument_event_{index}", f'touch "{path}"')
- self.load_plugins('hook')
+ self.load_plugins("hook")
for index in range(len(temporary_paths)):
- plugins.send(f'test_no_argument_event_{index}')
+ plugins.send(f"test_no_argument_event_{index}")
for path in temporary_paths:
self.assertTrue(os.path.isfile(path))
os.remove(path)
# FIXME: fails on windows
- @unittest.skipIf(sys.platform == 'win32', 'win32')
+ @unittest.skipIf(sys.platform == "win32", "win32")
def test_hook_event_substitution(self):
temporary_directory = tempfile._get_default_tempdir()
- event_names = [f'test_event_event_{i}' for i in
- range(self.TEST_HOOK_COUNT)]
+ event_names = [
+ f"test_event_event_{i}" for i in range(self.TEST_HOOK_COUNT)
+ ]
for event in event_names:
- self._add_hook(event,
- f'touch "{temporary_directory}/{{event}}"')
+ self._add_hook(event, f'touch "{temporary_directory}/{{event}}"')
- self.load_plugins('hook')
+ self.load_plugins("hook")
for event in event_names:
plugins.send(event)
@@ -130,41 +127,39 @@ def test_hook_event_substitution(self):
os.remove(path)
# FIXME: fails on windows
- @unittest.skipIf(sys.platform == 'win32', 'win32')
+ @unittest.skipIf(sys.platform == "win32", "win32")
def test_hook_argument_substitution(self):
temporary_paths = [
get_temporary_path() for i in range(self.TEST_HOOK_COUNT)
]
for index, path in enumerate(temporary_paths):
- self._add_hook(f'test_argument_event_{index}',
- 'touch "{path}"')
+ self._add_hook(f"test_argument_event_{index}", 'touch "{path}"')
- self.load_plugins('hook')
+ self.load_plugins("hook")
for index, path in enumerate(temporary_paths):
- plugins.send(f'test_argument_event_{index}', path=path)
+ plugins.send(f"test_argument_event_{index}", path=path)
for path in temporary_paths:
self.assertTrue(os.path.isfile(path))
os.remove(path)
# FIXME: fails on windows
- @unittest.skipIf(sys.platform == 'win32', 'win32')
+ @unittest.skipIf(sys.platform == "win32", "win32")
def test_hook_bytes_interpolation(self):
temporary_paths = [
- get_temporary_path().encode('utf-8')
+ get_temporary_path().encode("utf-8")
for i in range(self.TEST_HOOK_COUNT)
]
for index, path in enumerate(temporary_paths):
- self._add_hook(f'test_bytes_event_{index}',
- 'touch "{path}"')
+ self._add_hook(f"test_bytes_event_{index}", 'touch "{path}"')
- self.load_plugins('hook')
+ self.load_plugins("hook")
for index, path in enumerate(temporary_paths):
- plugins.send(f'test_bytes_event_{index}', path=path)
+ plugins.send(f"test_bytes_event_{index}", path=path)
for path in temporary_paths:
self.assertTrue(os.path.isfile(path))
@@ -174,5 +169,6 @@ def test_hook_bytes_interpolation(self):
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/plugins/test_ihate.py b/test/plugins/test_ihate.py
index 3ba4e67de4..5f81269b78 100644
--- a/test/plugins/test_ihate.py
+++ b/test/plugins/test_ihate.py
@@ -2,20 +2,18 @@
import unittest
+
from beets import importer
from beets.library import Item
from beetsplug.ihate import IHatePlugin
class IHatePluginTest(unittest.TestCase):
-
def test_hate(self):
-
match_pattern = {}
test_item = Item(
- genre='TestGenre',
- album='TestAlbum',
- artist='TestArtist')
+ genre="TestGenre", album="TestAlbum", artist="TestArtist"
+ )
task = importer.SingletonImportTask(None, test_item)
# Empty query should let it pass.
@@ -34,18 +32,23 @@ def test_hate(self):
self.assertFalse(IHatePlugin.do_i_hate_this(task, match_pattern))
# Both queries are blocked by AND clause with unmatched condition.
- match_pattern = ["album:notthis genre:testgenre",
- "artist:testartist album:notthis"]
+ match_pattern = [
+ "album:notthis genre:testgenre",
+ "artist:testartist album:notthis",
+ ]
self.assertFalse(IHatePlugin.do_i_hate_this(task, match_pattern))
# Only one query should fire.
- match_pattern = ["album:testalbum genre:testgenre",
- "artist:testartist album:notthis"]
+ match_pattern = [
+ "album:testalbum genre:testgenre",
+ "artist:testartist album:notthis",
+ ]
self.assertTrue(IHatePlugin.do_i_hate_this(task, match_pattern))
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/plugins/test_importadded.py b/test/plugins/test_importadded.py
index 6644c845f1..35292355a3 100644
--- a/test/plugins/test_importadded.py
+++ b/test/plugins/test_importadded.py
@@ -17,8 +17,8 @@
import os
import unittest
+from test.test_importer import AutotagStub, ImportHelper
-from test.test_importer import ImportHelper, AutotagStub
from beets import importer
from beets.util import displayable_path, syspath
from beetsplug.importadded import ImportAddedPlugin
@@ -41,20 +41,20 @@ def modify_mtimes(paths, offset=-60000):
class ImportAddedTest(unittest.TestCase, ImportHelper):
-
# The minimum mtime of the files to be imported
min_mtime = None
def setUp(self):
preserve_plugin_listeners()
self.setup_beets()
- self.load_plugins('importadded')
+ self.load_plugins("importadded")
self._create_import_dir(2)
# Different mtimes on the files to be imported in order to test the
# plugin
modify_mtimes(mfile.path for mfile in self.media_files)
- self.min_mtime = min(os.path.getmtime(mfile.path)
- for mfile in self.media_files)
+ self.min_mtime = min(
+ os.path.getmtime(mfile.path) for mfile in self.media_files
+ )
self.matcher = AutotagStub().install()
self.matcher.macthin = AutotagStub.GOOD
self._setup_import_session()
@@ -68,10 +68,11 @@ def tearDown(self):
def find_media_file(self, item):
"""Find the pre-import MediaFile for an Item"""
for m in self.media_files:
- if m.title.replace('Tag', 'Applied') == item.title:
+ if m.title.replace("Tag", "Applied") == item.title:
return m
- raise AssertionError("No MediaFile found for Item " +
- displayable_path(item.path))
+ raise AssertionError(
+ "No MediaFile found for Item " + displayable_path(item.path)
+ )
def assertEqualTimes(self, first, second, msg=None): # noqa
"""For comparing file modification times at a sufficient precision"""
@@ -88,14 +89,14 @@ def test_import_album_with_added_dates(self):
self.assertAlbumImport()
def test_import_album_inplace_with_added_dates(self):
- self.config['import']['copy'] = False
- self.config['import']['move'] = False
- self.config['import']['link'] = False
- self.config['import']['hardlink'] = False
+ self.config["import"]["copy"] = False
+ self.config["import"]["move"] = False
+ self.config["import"]["link"] = False
+ self.config["import"]["hardlink"] = False
self.assertAlbumImport()
def test_import_album_with_preserved_mtimes(self):
- self.config['importadded']['preserve_mtimes'] = True
+ self.config["importadded"]["preserve_mtimes"] = True
self.importer.run()
album = self.lib.albums().get()
self.assertEqual(album.added, self.min_mtime)
@@ -103,16 +104,14 @@ def test_import_album_with_preserved_mtimes(self):
self.assertEqualTimes(item.added, self.min_mtime)
mediafile_mtime = os.path.getmtime(self.find_media_file(item).path)
self.assertEqualTimes(item.mtime, mediafile_mtime)
- self.assertEqualTimes(os.path.getmtime(item.path),
- mediafile_mtime)
+ self.assertEqualTimes(os.path.getmtime(item.path), mediafile_mtime)
def test_reimported_album_skipped(self):
# Import and record the original added dates
self.importer.run()
album = self.lib.albums().get()
album_added_before = album.added
- items_added_before = {item.path: item.added
- for item in album.items()}
+ items_added_before = {item.path: item.added for item in album.items()}
# Newer Item path mtimes as if Beets had modified them
modify_mtimes(items_added_before.keys(), offset=10000)
# Reimport
@@ -121,37 +120,39 @@ def test_reimported_album_skipped(self):
# Verify the reimported items
album = self.lib.albums().get()
self.assertEqualTimes(album.added, album_added_before)
- items_added_after = {item.path: item.added
- for item in album.items()}
+ items_added_after = {item.path: item.added for item in album.items()}
for item_path, added_after in items_added_after.items():
- self.assertEqualTimes(items_added_before[item_path], added_after,
- "reimport modified Item.added for " +
- displayable_path(item_path))
+ self.assertEqualTimes(
+ items_added_before[item_path],
+ added_after,
+ "reimport modified Item.added for "
+ + displayable_path(item_path),
+ )
def test_import_singletons_with_added_dates(self):
- self.config['import']['singletons'] = True
+ self.config["import"]["singletons"] = True
self.importer.run()
for item in self.lib.items():
mfile = self.find_media_file(item)
self.assertEqualTimes(item.added, os.path.getmtime(mfile.path))
def test_import_singletons_with_preserved_mtimes(self):
- self.config['import']['singletons'] = True
- self.config['importadded']['preserve_mtimes'] = True
+ self.config["import"]["singletons"] = True
+ self.config["importadded"]["preserve_mtimes"] = True
self.importer.run()
for item in self.lib.items():
mediafile_mtime = os.path.getmtime(self.find_media_file(item).path)
self.assertEqualTimes(item.added, mediafile_mtime)
self.assertEqualTimes(item.mtime, mediafile_mtime)
- self.assertEqualTimes(os.path.getmtime(item.path),
- mediafile_mtime)
+ self.assertEqualTimes(os.path.getmtime(item.path), mediafile_mtime)
def test_reimported_singletons_skipped(self):
- self.config['import']['singletons'] = True
+ self.config["import"]["singletons"] = True
# Import and record the original added dates
self.importer.run()
- items_added_before = {item.path: item.added
- for item in self.lib.items()}
+ items_added_before = {
+ item.path: item.added for item in self.lib.items()
+ }
# Newer Item path mtimes as if Beets had modified them
modify_mtimes(items_added_before.keys(), offset=10000)
# Reimport
@@ -159,17 +160,19 @@ def test_reimported_singletons_skipped(self):
self._setup_import_session(import_dir=import_dir, singletons=True)
self.importer.run()
# Verify the reimported items
- items_added_after = {item.path: item.added
- for item in self.lib.items()}
+ items_added_after = {item.path: item.added for item in self.lib.items()}
for item_path, added_after in items_added_after.items():
- self.assertEqualTimes(items_added_before[item_path], added_after,
- "reimport modified Item.added for " +
- displayable_path(item_path))
+ self.assertEqualTimes(
+ items_added_before[item_path],
+ added_after,
+ "reimport modified Item.added for "
+ + displayable_path(item_path),
+ )
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/plugins/test_importfeeds.py b/test/plugins/test_importfeeds.py
index 981fa6e6ad..7d95a150b4 100644
--- a/test/plugins/test_importfeeds.py
+++ b/test/plugins/test_importfeeds.py
@@ -1,73 +1,75 @@
+import datetime
import os
import os.path
-import tempfile
import shutil
+import tempfile
import unittest
-import datetime
from beets import config
-from beets.library import Item, Album, Library
+from beets.library import Album, Item, Library
from beetsplug.importfeeds import ImportFeedsPlugin
class ImportfeedsTestTest(unittest.TestCase):
-
def setUp(self):
config.clear()
config.read(user=False)
self.importfeeds = ImportFeedsPlugin()
- self.lib = Library(':memory:')
+ self.lib = Library(":memory:")
self.feeds_dir = tempfile.mkdtemp()
- config['importfeeds']['dir'] = self.feeds_dir
+ config["importfeeds"]["dir"] = self.feeds_dir
def tearDown(self):
shutil.rmtree(self.feeds_dir)
def test_multi_format_album_playlist(self):
- config['importfeeds']['formats'] = 'm3u_multi'
- album = Album(album='album/name', id=1)
- item_path = os.path.join('path', 'to', 'item')
- item = Item(title='song', album_id=1, path=item_path)
+ config["importfeeds"]["formats"] = "m3u_multi"
+ album = Album(album="album/name", id=1)
+ item_path = os.path.join("path", "to", "item")
+ item = Item(title="song", album_id=1, path=item_path)
self.lib.add(album)
self.lib.add(item)
self.importfeeds.album_imported(self.lib, album)
- playlist_path = os.path.join(self.feeds_dir,
- os.listdir(self.feeds_dir)[0])
- self.assertTrue(playlist_path.endswith('album_name.m3u'))
+ playlist_path = os.path.join(
+ self.feeds_dir, os.listdir(self.feeds_dir)[0]
+ )
+ self.assertTrue(playlist_path.endswith("album_name.m3u"))
with open(playlist_path) as playlist:
self.assertIn(item_path, playlist.read())
def test_playlist_in_subdir(self):
- config['importfeeds']['formats'] = 'm3u'
- config['importfeeds']['m3u_name'] = \
- os.path.join('subdir', 'imported.m3u')
- album = Album(album='album/name', id=1)
- item_path = os.path.join('path', 'to', 'item')
- item = Item(title='song', album_id=1, path=item_path)
+ config["importfeeds"]["formats"] = "m3u"
+ config["importfeeds"]["m3u_name"] = os.path.join(
+ "subdir", "imported.m3u"
+ )
+ album = Album(album="album/name", id=1)
+ item_path = os.path.join("path", "to", "item")
+ item = Item(title="song", album_id=1, path=item_path)
self.lib.add(album)
self.lib.add(item)
self.importfeeds.album_imported(self.lib, album)
- playlist = os.path.join(self.feeds_dir,
- config['importfeeds']['m3u_name'].get())
+ playlist = os.path.join(
+ self.feeds_dir, config["importfeeds"]["m3u_name"].get()
+ )
playlist_subdir = os.path.dirname(playlist)
self.assertTrue(os.path.isdir(playlist_subdir))
self.assertTrue(os.path.isfile(playlist))
def test_playlist_per_session(self):
- config['importfeeds']['formats'] = 'm3u_session'
- config['importfeeds']['m3u_name'] = 'imports.m3u'
- album = Album(album='album/name', id=1)
- item_path = os.path.join('path', 'to', 'item')
- item = Item(title='song', album_id=1, path=item_path)
+ config["importfeeds"]["formats"] = "m3u_session"
+ config["importfeeds"]["m3u_name"] = "imports.m3u"
+ album = Album(album="album/name", id=1)
+ item_path = os.path.join("path", "to", "item")
+ item = Item(title="song", album_id=1, path=item_path)
self.lib.add(album)
self.lib.add(item)
self.importfeeds.import_begin(self)
self.importfeeds.album_imported(self.lib, album)
date = datetime.datetime.now().strftime("%Y%m%d_%Hh%M")
- playlist = os.path.join(self.feeds_dir, f'imports_{date}.m3u')
+ playlist = os.path.join(self.feeds_dir, f"imports_{date}.m3u")
self.assertTrue(os.path.isfile(playlist))
with open(playlist) as playlist_contents:
self.assertIn(item_path, playlist_contents.read())
@@ -76,5 +78,6 @@ def test_playlist_per_session(self):
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/plugins/test_info.py b/test/plugins/test_info.py
index 929a83d800..64308e8b11 100644
--- a/test/plugins/test_info.py
+++ b/test/plugins/test_info.py
@@ -17,14 +17,14 @@
from test.helper import TestHelper
from mediafile import MediaFile
+
from beets.util import displayable_path
class InfoTest(unittest.TestCase, TestHelper):
-
def setUp(self):
self.setup_beets()
- self.load_plugins('info')
+ self.load_plugins("info")
def tearDown(self):
self.unload_plugins()
@@ -34,99 +34,104 @@ def test_path(self):
path = self.create_mediafile_fixture()
mediafile = MediaFile(path)
- mediafile.albumartist = 'AAA'
- mediafile.disctitle = 'DDD'
- mediafile.genres = ['a', 'b', 'c']
+ mediafile.albumartist = "AAA"
+ mediafile.disctitle = "DDD"
+ mediafile.genres = ["a", "b", "c"]
mediafile.composer = None
mediafile.save()
- out = self.run_with_output('info', path)
+ out = self.run_with_output("info", path)
self.assertIn(displayable_path(path), out)
- self.assertIn('albumartist: AAA', out)
- self.assertIn('disctitle: DDD', out)
- self.assertIn('genres: a; b; c', out)
- self.assertNotIn('composer:', out)
+ self.assertIn("albumartist: AAA", out)
+ self.assertIn("disctitle: DDD", out)
+ self.assertIn("genres: a; b; c", out)
+ self.assertNotIn("composer:", out)
self.remove_mediafile_fixtures()
def test_item_query(self):
item1, item2 = self.add_item_fixtures(count=2)
- item1.album = 'xxxx'
+ item1.album = "xxxx"
item1.write()
- item1.album = 'yyyy'
+ item1.album = "yyyy"
item1.store()
- out = self.run_with_output('info', 'album:yyyy')
+ out = self.run_with_output("info", "album:yyyy")
self.assertIn(displayable_path(item1.path), out)
- self.assertIn('album: xxxx', out)
+ self.assertIn("album: xxxx", out)
self.assertNotIn(displayable_path(item2.path), out)
def test_item_library_query(self):
- item, = self.add_item_fixtures()
- item.album = 'xxxx'
+ (item,) = self.add_item_fixtures()
+ item.album = "xxxx"
item.store()
- out = self.run_with_output('info', '--library', 'album:xxxx')
+ out = self.run_with_output("info", "--library", "album:xxxx")
self.assertIn(displayable_path(item.path), out)
- self.assertIn('album: xxxx', out)
+ self.assertIn("album: xxxx", out)
def test_collect_item_and_path(self):
path = self.create_mediafile_fixture()
mediafile = MediaFile(path)
- item, = self.add_item_fixtures()
+ (item,) = self.add_item_fixtures()
- item.album = mediafile.album = 'AAA'
+ item.album = mediafile.album = "AAA"
item.tracktotal = mediafile.tracktotal = 5
- item.title = 'TTT'
- mediafile.title = 'SSS'
+ item.title = "TTT"
+ mediafile.title = "SSS"
item.write()
item.store()
mediafile.save()
- out = self.run_with_output('info', '--summarize', 'album:AAA', path)
- self.assertIn('album: AAA', out)
- self.assertIn('tracktotal: 5', out)
- self.assertIn('title: [various]', out)
+ out = self.run_with_output("info", "--summarize", "album:AAA", path)
+ self.assertIn("album: AAA", out)
+ self.assertIn("tracktotal: 5", out)
+ self.assertIn("title: [various]", out)
self.remove_mediafile_fixtures()
def test_collect_item_and_path_with_multi_values(self):
path = self.create_mediafile_fixture()
mediafile = MediaFile(path)
- item, = self.add_item_fixtures()
+ (item,) = self.add_item_fixtures()
- item.album = mediafile.album = 'AAA'
+ item.album = mediafile.album = "AAA"
item.tracktotal = mediafile.tracktotal = 5
- item.title = 'TTT'
- mediafile.title = 'SSS'
+ item.title = "TTT"
+ mediafile.title = "SSS"
- item.albumartists = ['Artist A', 'Artist B']
- mediafile.albumartists = ['Artist C', 'Artist D']
+ item.albumartists = ["Artist A", "Artist B"]
+ mediafile.albumartists = ["Artist C", "Artist D"]
- item.artists = ['Artist A', 'Artist Z']
- mediafile.artists = ['Artist A', 'Artist Z']
+ item.artists = ["Artist A", "Artist Z"]
+ mediafile.artists = ["Artist A", "Artist Z"]
item.write()
item.store()
mediafile.save()
- out = self.run_with_output('info', '--summarize', 'album:AAA', path)
- self.assertIn('album: AAA', out)
- self.assertIn('tracktotal: 5', out)
- self.assertIn('title: [various]', out)
- self.assertIn('albumartists: [various]', out)
- self.assertIn('artists: Artist A; Artist Z', out)
+ out = self.run_with_output("info", "--summarize", "album:AAA", path)
+ self.assertIn("album: AAA", out)
+ self.assertIn("tracktotal: 5", out)
+ self.assertIn("title: [various]", out)
+ self.assertIn("albumartists: [various]", out)
+ self.assertIn("artists: Artist A; Artist Z", out)
self.remove_mediafile_fixtures()
def test_custom_format(self):
self.add_item_fixtures()
- out = self.run_with_output('info', '--library', '--format',
- '$track. $title - $artist ($length)')
- self.assertEqual('02. tïtle 0 - the artist (0:01)\n', out)
+ out = self.run_with_output(
+ "info",
+ "--library",
+ "--format",
+ "$track. $title - $artist ($length)",
+ )
+ self.assertEqual("02. tïtle 0 - the artist (0:01)\n", out)
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/plugins/test_ipfs.py b/test/plugins/test_ipfs.py
index 593a01b8f5..f4fc7c8bf4 100644
--- a/test/plugins/test_ipfs.py
+++ b/test/plugins/test_ipfs.py
@@ -12,25 +12,22 @@
# included in all copies or substantial portions of the Software.
-from unittest.mock import patch, Mock
-
-from beets import library
-from beets.util import bytestring_path, _fsencoding
-from beetsplug.ipfs import IPFSPlugin
-
-import unittest
import os
-
+import unittest
from test import _common
from test.helper import TestHelper
+from unittest.mock import Mock, patch
+
+from beets import library
+from beets.util import _fsencoding, bytestring_path
+from beetsplug.ipfs import IPFSPlugin
-@patch('beets.util.command_output', Mock())
+@patch("beets.util.command_output", Mock())
class IPFSPluginTest(unittest.TestCase, TestHelper):
-
def setUp(self):
self.setup_beets()
- self.load_plugins('ipfs')
+ self.load_plugins("ipfs")
self.lib = library.Library(":memory:")
def tearDown(self):
@@ -47,16 +44,16 @@ def test_stored_hashes(self):
want_item = test_album.items()[2]
for check_item in added_album.items():
try:
- if check_item.get('ipfs', with_album=False):
+ if check_item.get("ipfs", with_album=False):
ipfs_item = os.path.basename(want_item.path).decode(
_fsencoding(),
)
- want_path = '/ipfs/{}/{}'.format(test_album.ipfs,
- ipfs_item)
+ want_path = "/ipfs/{}/{}".format(test_album.ipfs, ipfs_item)
want_path = bytestring_path(want_path)
self.assertEqual(check_item.path, want_path)
- self.assertEqual(check_item.get('ipfs', with_album=False),
- want_item.ipfs)
+ self.assertEqual(
+ check_item.get("ipfs", with_album=False), want_item.ipfs
+ )
self.assertEqual(check_item.title, want_item.title)
found = True
except AttributeError:
@@ -65,22 +62,22 @@ def test_stored_hashes(self):
def mk_test_album(self):
items = [_common.item() for _ in range(3)]
- items[0].title = 'foo bar'
- items[0].artist = '1one'
- items[0].album = 'baz'
+ items[0].title = "foo bar"
+ items[0].artist = "1one"
+ items[0].album = "baz"
items[0].year = 2001
items[0].comp = True
- items[1].title = 'baz qux'
- items[1].artist = '2two'
- items[1].album = 'baz'
+ items[1].title = "baz qux"
+ items[1].artist = "2two"
+ items[1].album = "baz"
items[1].year = 2002
items[1].comp = True
- items[2].title = 'beets 4 eva'
- items[2].artist = '3three'
- items[2].album = 'foo'
+ items[2].title = "beets 4 eva"
+ items[2].artist = "3three"
+ items[2].album = "foo"
items[2].year = 2003
items[2].comp = False
- items[2].ipfs = 'QmfM9ic5LJj7V6ecozFx1MkSoaaiq3PXfhJoFvyqzpLXSk'
+ items[2].ipfs = "QmfM9ic5LJj7V6ecozFx1MkSoaaiq3PXfhJoFvyqzpLXSk"
for item in items:
self.lib.add(item)
@@ -96,5 +93,5 @@ def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/plugins/test_keyfinder.py b/test/plugins/test_keyfinder.py
index 4fb77e3de8..4827fd3d76 100644
--- a/test/plugins/test_keyfinder.py
+++ b/test/plugins/test_keyfinder.py
@@ -13,36 +13,36 @@
# included in all copies or substantial portions of the Software.
-from unittest.mock import patch
import unittest
from test.helper import TestHelper
+from unittest.mock import patch
-from beets.library import Item
from beets import util
+from beets.library import Item
-@patch('beets.util.command_output')
+@patch("beets.util.command_output")
class KeyFinderTest(unittest.TestCase, TestHelper):
-
def setUp(self):
self.setup_beets()
- self.load_plugins('keyfinder')
+ self.load_plugins("keyfinder")
def tearDown(self):
self.teardown_beets()
self.unload_plugins()
def test_add_key(self, command_output):
- item = Item(path='/file')
+ item = Item(path="/file")
item.add(self.lib)
command_output.return_value = util.CommandOutput(b"dbm", b"")
- self.run_command('keyfinder')
+ self.run_command("keyfinder")
item.load()
- self.assertEqual(item['initial_key'], 'C#m')
+ self.assertEqual(item["initial_key"], "C#m")
command_output.assert_called_with(
- ['KeyFinder', '-f', util.syspath(item.path)])
+ ["KeyFinder", "-f", util.syspath(item.path)]
+ )
def test_add_key_on_import(self, command_output):
command_output.return_value = util.CommandOutput(b"dbm", b"")
@@ -50,43 +50,44 @@ def test_add_key_on_import(self, command_output):
importer.run()
item = self.lib.items().get()
- self.assertEqual(item['initial_key'], 'C#m')
+ self.assertEqual(item["initial_key"], "C#m")
def test_force_overwrite(self, command_output):
- self.config['keyfinder']['overwrite'] = True
+ self.config["keyfinder"]["overwrite"] = True
- item = Item(path='/file', initial_key='F')
+ item = Item(path="/file", initial_key="F")
item.add(self.lib)
command_output.return_value = util.CommandOutput(b"C#m", b"")
- self.run_command('keyfinder')
+ self.run_command("keyfinder")
item.load()
- self.assertEqual(item['initial_key'], 'C#m')
+ self.assertEqual(item["initial_key"], "C#m")
def test_do_not_overwrite(self, command_output):
- item = Item(path='/file', initial_key='F')
+ item = Item(path="/file", initial_key="F")
item.add(self.lib)
command_output.return_value = util.CommandOutput(b"dbm", b"")
- self.run_command('keyfinder')
+ self.run_command("keyfinder")
item.load()
- self.assertEqual(item['initial_key'], 'F')
+ self.assertEqual(item["initial_key"], "F")
def test_no_key(self, command_output):
- item = Item(path='/file')
+ item = Item(path="/file")
item.add(self.lib)
command_output.return_value = util.CommandOutput(b"", b"")
- self.run_command('keyfinder')
+ self.run_command("keyfinder")
item.load()
- self.assertEqual(item['initial_key'], None)
+ self.assertEqual(item["initial_key"], None)
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/plugins/test_lastgenre.py b/test/plugins/test_lastgenre.py
index a7a939caae..ee8f1d4a25 100644
--- a/test/plugins/test_lastgenre.py
+++ b/test/plugins/test_lastgenre.py
@@ -16,13 +16,12 @@
import unittest
+from test import _common
+from test.helper import TestHelper
from unittest.mock import Mock
-from test import _common
-from beetsplug import lastgenre
from beets import config
-
-from test.helper import TestHelper
+from beetsplug import lastgenre
class LastGenrePluginTest(unittest.TestCase, TestHelper):
@@ -33,107 +32,101 @@ def setUp(self):
def tearDown(self):
self.teardown_beets()
- def _setup_config(self, whitelist=False, canonical=False, count=1,
- prefer_specific=False):
- config['lastgenre']['canonical'] = canonical
- config['lastgenre']['count'] = count
- config['lastgenre']['prefer_specific'] = prefer_specific
+ def _setup_config(
+ self, whitelist=False, canonical=False, count=1, prefer_specific=False
+ ):
+ config["lastgenre"]["canonical"] = canonical
+ config["lastgenre"]["count"] = count
+ config["lastgenre"]["prefer_specific"] = prefer_specific
if isinstance(whitelist, (bool, (str,))):
# Filename, default, or disabled.
- config['lastgenre']['whitelist'] = whitelist
+ config["lastgenre"]["whitelist"] = whitelist
self.plugin.setup()
if not isinstance(whitelist, (bool, (str,))):
# Explicit list of genres.
self.plugin.whitelist = whitelist
def test_default(self):
- """Fetch genres with whitelist and c14n deactivated
- """
+ """Fetch genres with whitelist and c14n deactivated"""
self._setup_config()
- self.assertEqual(self.plugin._resolve_genres(['delta blues']),
- 'Delta Blues')
+ self.assertEqual(
+ self.plugin._resolve_genres(["delta blues"]), "Delta Blues"
+ )
def test_c14n_only(self):
"""Default c14n tree funnels up to most common genre except for *wrong*
genres that stay unchanged.
"""
self._setup_config(canonical=True, count=99)
- self.assertEqual(self.plugin._resolve_genres(['delta blues']),
- 'Blues')
- self.assertEqual(self.plugin._resolve_genres(['iota blues']),
- 'Iota Blues')
+ self.assertEqual(self.plugin._resolve_genres(["delta blues"]), "Blues")
+ self.assertEqual(
+ self.plugin._resolve_genres(["iota blues"]), "Iota Blues"
+ )
def test_whitelist_only(self):
- """Default whitelist rejects *wrong* (non existing) genres.
- """
+ """Default whitelist rejects *wrong* (non existing) genres."""
self._setup_config(whitelist=True)
- self.assertEqual(self.plugin._resolve_genres(['iota blues']),
- '')
+ self.assertEqual(self.plugin._resolve_genres(["iota blues"]), "")
def test_whitelist_c14n(self):
"""Default whitelist and c14n both activated result in all parents
genres being selected (from specific to common).
"""
self._setup_config(canonical=True, whitelist=True, count=99)
- self.assertEqual(self.plugin._resolve_genres(['delta blues']),
- 'Delta Blues, Blues')
+ self.assertEqual(
+ self.plugin._resolve_genres(["delta blues"]), "Delta Blues, Blues"
+ )
def test_whitelist_custom(self):
- """Keep only genres that are in the whitelist.
- """
- self._setup_config(whitelist={'blues', 'rock', 'jazz'},
- count=2)
- self.assertEqual(self.plugin._resolve_genres(['pop', 'blues']),
- 'Blues')
+ """Keep only genres that are in the whitelist."""
+ self._setup_config(whitelist={"blues", "rock", "jazz"}, count=2)
+ self.assertEqual(self.plugin._resolve_genres(["pop", "blues"]), "Blues")
- self._setup_config(canonical='', whitelist={'rock'})
- self.assertEqual(self.plugin._resolve_genres(['delta blues']),
- '')
+ self._setup_config(canonical="", whitelist={"rock"})
+ self.assertEqual(self.plugin._resolve_genres(["delta blues"]), "")
def test_count(self):
"""Keep the n first genres, as we expect them to be sorted from more to
less popular.
"""
- self._setup_config(whitelist={'blues', 'rock', 'jazz'},
- count=2)
- self.assertEqual(self.plugin._resolve_genres(
- ['jazz', 'pop', 'rock', 'blues']),
- 'Jazz, Rock')
+ self._setup_config(whitelist={"blues", "rock", "jazz"}, count=2)
+ self.assertEqual(
+ self.plugin._resolve_genres(["jazz", "pop", "rock", "blues"]),
+ "Jazz, Rock",
+ )
def test_count_c14n(self):
- """Keep the n first genres, after having applied c14n when necessary
- """
- self._setup_config(whitelist={'blues', 'rock', 'jazz'},
- canonical=True,
- count=2)
+ """Keep the n first genres, after having applied c14n when necessary"""
+ self._setup_config(
+ whitelist={"blues", "rock", "jazz"}, canonical=True, count=2
+ )
# thanks to c14n, 'blues' superseeds 'country blues' and takes the
# second slot
- self.assertEqual(self.plugin._resolve_genres(
- ['jazz', 'pop', 'country blues', 'rock']),
- 'Jazz, Blues')
+ self.assertEqual(
+ self.plugin._resolve_genres(
+ ["jazz", "pop", "country blues", "rock"]
+ ),
+ "Jazz, Blues",
+ )
def test_c14n_whitelist(self):
- """Genres first pass through c14n and are then filtered
- """
- self._setup_config(canonical=True, whitelist={'rock'})
- self.assertEqual(self.plugin._resolve_genres(['delta blues']),
- '')
+ """Genres first pass through c14n and are then filtered"""
+ self._setup_config(canonical=True, whitelist={"rock"})
+ self.assertEqual(self.plugin._resolve_genres(["delta blues"]), "")
def test_empty_string_enables_canonical(self):
"""For backwards compatibility, setting the `canonical` option
to the empty string enables it using the default tree.
"""
- self._setup_config(canonical='', count=99)
- self.assertEqual(self.plugin._resolve_genres(['delta blues']),
- 'Blues')
+ self._setup_config(canonical="", count=99)
+ self.assertEqual(self.plugin._resolve_genres(["delta blues"]), "Blues")
def test_empty_string_enables_whitelist(self):
"""Again for backwards compatibility, setting the `whitelist`
option to the empty string enables the default set of genres.
"""
- self._setup_config(whitelist='')
- self.assertEqual(self.plugin._resolve_genres(['iota blues']),
- '')
+ self._setup_config(whitelist="")
+ self.assertEqual(self.plugin._resolve_genres(["iota blues"]), "")
def test_prefer_specific_loads_tree(self):
"""When prefer_specific is enabled but canonical is not the
@@ -143,19 +136,19 @@ def test_prefer_specific_loads_tree(self):
self.assertNotEqual(self.plugin.c14n_branches, [])
def test_prefer_specific_without_canonical(self):
- """Prefer_specific works without canonical.
- """
+ """Prefer_specific works without canonical."""
self._setup_config(prefer_specific=True, canonical=False, count=4)
- self.assertEqual(self.plugin._resolve_genres(
- ['math rock', 'post-rock']),
- 'Post-Rock, Math Rock')
+ self.assertEqual(
+ self.plugin._resolve_genres(["math rock", "post-rock"]),
+ "Post-Rock, Math Rock",
+ )
def test_no_duplicate(self):
- """Remove duplicated genres.
- """
+ """Remove duplicated genres."""
self._setup_config(count=99)
- self.assertEqual(self.plugin._resolve_genres(['blues', 'blues']),
- 'Blues')
+ self.assertEqual(
+ self.plugin._resolve_genres(["blues", "blues"]), "Blues"
+ )
def test_tags_for(self):
class MockPylastElem:
@@ -169,29 +162,29 @@ class MockPylastObj:
def get_top_tags(self):
tag1 = Mock()
tag1.weight = 90
- tag1.item = MockPylastElem('Pop')
+ tag1.item = MockPylastElem("Pop")
tag2 = Mock()
tag2.weight = 40
- tag2.item = MockPylastElem('Rap')
+ tag2.item = MockPylastElem("Rap")
return [tag1, tag2]
plugin = lastgenre.LastGenrePlugin()
res = plugin._tags_for(MockPylastObj())
- self.assertEqual(res, ['pop', 'rap'])
+ self.assertEqual(res, ["pop", "rap"])
res = plugin._tags_for(MockPylastObj(), min_weight=50)
- self.assertEqual(res, ['pop'])
+ self.assertEqual(res, ["pop"])
def test_get_genre(self):
- mock_genres = {'track': '1', 'album': '2', 'artist': '3'}
+ mock_genres = {"track": "1", "album": "2", "artist": "3"}
def mock_fetch_track_genre(self, obj=None):
- return mock_genres['track']
+ return mock_genres["track"]
def mock_fetch_album_genre(self, obj):
- return mock_genres['album']
+ return mock_genres["album"]
def mock_fetch_artist_genre(self, obj):
- return mock_genres['artist']
+ return mock_genres["artist"]
lastgenre.LastGenrePlugin.fetch_track_genre = mock_fetch_track_genre
lastgenre.LastGenrePlugin.fetch_album_genre = mock_fetch_album_genre
@@ -199,49 +192,52 @@ def mock_fetch_artist_genre(self, obj):
self._setup_config(whitelist=False)
item = _common.item()
- item.genre = mock_genres['track']
+ item.genre = mock_genres["track"]
- config['lastgenre'] = {'force': False}
+ config["lastgenre"] = {"force": False}
res = self.plugin._get_genre(item)
- self.assertEqual(res, (item.genre, 'keep'))
+ self.assertEqual(res, (item.genre, "keep"))
- config['lastgenre'] = {'force': True, 'source': 'track'}
+ config["lastgenre"] = {"force": True, "source": "track"}
res = self.plugin._get_genre(item)
- self.assertEqual(res, (mock_genres['track'], 'track'))
+ self.assertEqual(res, (mock_genres["track"], "track"))
- config['lastgenre'] = {'source': 'album'}
+ config["lastgenre"] = {"source": "album"}
res = self.plugin._get_genre(item)
- self.assertEqual(res, (mock_genres['album'], 'album'))
+ self.assertEqual(res, (mock_genres["album"], "album"))
- config['lastgenre'] = {'source': 'artist'}
+ config["lastgenre"] = {"source": "artist"}
res = self.plugin._get_genre(item)
- self.assertEqual(res, (mock_genres['artist'], 'artist'))
+ self.assertEqual(res, (mock_genres["artist"], "artist"))
- mock_genres['artist'] = None
+ mock_genres["artist"] = None
res = self.plugin._get_genre(item)
- self.assertEqual(res, (item.genre, 'original'))
+ self.assertEqual(res, (item.genre, "original"))
- config['lastgenre'] = {'fallback': 'rap'}
+ config["lastgenre"] = {"fallback": "rap"}
item.genre = None
res = self.plugin._get_genre(item)
- self.assertEqual(res, (config['lastgenre']['fallback'].get(),
- 'fallback'))
+ self.assertEqual(
+ res, (config["lastgenre"]["fallback"].get(), "fallback")
+ )
def test_sort_by_depth(self):
self._setup_config(canonical=True)
# Normal case.
- tags = ('electronic', 'ambient', 'post-rock', 'downtempo')
+ tags = ("electronic", "ambient", "post-rock", "downtempo")
res = self.plugin._sort_by_depth(tags)
self.assertEqual(
- res, ['post-rock', 'downtempo', 'ambient', 'electronic'])
+ res, ["post-rock", "downtempo", "ambient", "electronic"]
+ )
# Non-canonical tag ('chillout') present.
- tags = ('electronic', 'ambient', 'chillout')
+ tags = ("electronic", "ambient", "chillout")
res = self.plugin._sort_by_depth(tags)
- self.assertEqual(res, ['ambient', 'electronic'])
+ self.assertEqual(res, ["ambient", "electronic"])
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/plugins/test_limit.py b/test/plugins/test_limit.py
index 35c01c41a7..2e2fb38c37 100644
--- a/test/plugins/test_limit.py
+++ b/test/plugins/test_limit.py
@@ -14,7 +14,6 @@
"""Tests for the 'limit' plugin."""
import unittest
-
from test.helper import TestHelper
@@ -25,15 +24,15 @@ class LimitPluginTest(unittest.TestCase, TestHelper):
"""
def setUp(self):
-
self.setup_beets()
self.load_plugins("limit")
# we'll create an even number of tracks in the library
self.num_test_items = 10
assert self.num_test_items % 2 == 0
- for item_no, item in \
- enumerate(self.add_item_fixtures(count=self.num_test_items)):
+ for item_no, item in enumerate(
+ self.add_item_fixtures(count=self.num_test_items)
+ ):
item.track = item_no + 1
item.store()
@@ -68,13 +67,15 @@ def test_lslimit_tail(self):
def test_lslimit_head_invariant(self):
"""Returns the expected number with `lslimit --head` and a filter."""
result = self.run_with_output(
- "lslimit", "--head", str(self.num_limit), self.track_tail_range)
+ "lslimit", "--head", str(self.num_limit), self.track_tail_range
+ )
self.assertEqual(result.count("\n"), self.num_limit)
def test_lslimit_tail_invariant(self):
"""Returns the expected number with `lslimit --tail` and a filter."""
result = self.run_with_output(
- "lslimit", "--tail", str(self.num_limit), self.track_head_range)
+ "lslimit", "--tail", str(self.num_limit), self.track_head_range
+ )
self.assertEqual(result.count("\n"), self.num_limit)
def test_prefix(self):
@@ -101,5 +102,5 @@ def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/plugins/test_lyrics.py b/test/plugins/test_lyrics.py
index a5197182a8..e54223786e 100644
--- a/test/plugins/test_lyrics.py
+++ b/test/plugins/test_lyrics.py
@@ -19,18 +19,17 @@
import os
import re
import unittest
+from test import _common
+from unittest.mock import MagicMock, patch
import confuse
-from unittest.mock import MagicMock, patch
from beets import logging
from beets.library import Item
from beets.util import bytestring_path
from beetsplug import lyrics
-from test import _common
-
-log = logging.getLogger('beets.test_lyrics')
+log = logging.getLogger("beets.test_lyrics")
raw_backend = lyrics.Backend({}, log)
google = lyrics.Google(MagicMock(), log)
genius = lyrics.Genius(MagicMock(), log)
@@ -38,127 +37,111 @@
class LyricsPluginTest(unittest.TestCase):
-
def setUp(self):
"""Set up configuration."""
lyrics.LyricsPlugin()
def test_search_artist(self):
- item = Item(artist='Alice ft. Bob', title='song')
- self.assertIn(('Alice ft. Bob', ['song']),
- lyrics.search_pairs(item))
- self.assertIn(('Alice', ['song']),
- lyrics.search_pairs(item))
-
- item = Item(artist='Alice feat Bob', title='song')
- self.assertIn(('Alice feat Bob', ['song']),
- lyrics.search_pairs(item))
- self.assertIn(('Alice', ['song']),
- lyrics.search_pairs(item))
-
- item = Item(artist='Alice feat. Bob', title='song')
- self.assertIn(('Alice feat. Bob', ['song']),
- lyrics.search_pairs(item))
- self.assertIn(('Alice', ['song']),
- lyrics.search_pairs(item))
-
- item = Item(artist='Alice feats Bob', title='song')
- self.assertIn(('Alice feats Bob', ['song']),
- lyrics.search_pairs(item))
- self.assertNotIn(('Alice', ['song']),
- lyrics.search_pairs(item))
-
- item = Item(artist='Alice featuring Bob', title='song')
- self.assertIn(('Alice featuring Bob', ['song']),
- lyrics.search_pairs(item))
- self.assertIn(('Alice', ['song']),
- lyrics.search_pairs(item))
-
- item = Item(artist='Alice & Bob', title='song')
- self.assertIn(('Alice & Bob', ['song']),
- lyrics.search_pairs(item))
- self.assertIn(('Alice', ['song']),
- lyrics.search_pairs(item))
-
- item = Item(artist='Alice and Bob', title='song')
- self.assertIn(('Alice and Bob', ['song']),
- lyrics.search_pairs(item))
- self.assertIn(('Alice', ['song']),
- lyrics.search_pairs(item))
-
- item = Item(artist='Alice and Bob', title='song')
- self.assertEqual(('Alice and Bob', ['song']),
- list(lyrics.search_pairs(item))[0])
+ item = Item(artist="Alice ft. Bob", title="song")
+ self.assertIn(("Alice ft. Bob", ["song"]), lyrics.search_pairs(item))
+ self.assertIn(("Alice", ["song"]), lyrics.search_pairs(item))
+
+ item = Item(artist="Alice feat Bob", title="song")
+ self.assertIn(("Alice feat Bob", ["song"]), lyrics.search_pairs(item))
+ self.assertIn(("Alice", ["song"]), lyrics.search_pairs(item))
+
+ item = Item(artist="Alice feat. Bob", title="song")
+ self.assertIn(("Alice feat. Bob", ["song"]), lyrics.search_pairs(item))
+ self.assertIn(("Alice", ["song"]), lyrics.search_pairs(item))
+
+ item = Item(artist="Alice feats Bob", title="song")
+ self.assertIn(("Alice feats Bob", ["song"]), lyrics.search_pairs(item))
+ self.assertNotIn(("Alice", ["song"]), lyrics.search_pairs(item))
+
+ item = Item(artist="Alice featuring Bob", title="song")
+ self.assertIn(
+ ("Alice featuring Bob", ["song"]), lyrics.search_pairs(item)
+ )
+ self.assertIn(("Alice", ["song"]), lyrics.search_pairs(item))
+
+ item = Item(artist="Alice & Bob", title="song")
+ self.assertIn(("Alice & Bob", ["song"]), lyrics.search_pairs(item))
+ self.assertIn(("Alice", ["song"]), lyrics.search_pairs(item))
+
+ item = Item(artist="Alice and Bob", title="song")
+ self.assertIn(("Alice and Bob", ["song"]), lyrics.search_pairs(item))
+ self.assertIn(("Alice", ["song"]), lyrics.search_pairs(item))
+
+ item = Item(artist="Alice and Bob", title="song")
+ self.assertEqual(
+ ("Alice and Bob", ["song"]), list(lyrics.search_pairs(item))[0]
+ )
def test_search_artist_sort(self):
- item = Item(artist='CHVRCHΞS', title='song', artist_sort='CHVRCHES')
- self.assertIn(('CHVRCHΞS', ['song']),
- lyrics.search_pairs(item))
- self.assertIn(('CHVRCHES', ['song']),
- lyrics.search_pairs(item))
+ item = Item(artist="CHVRCHΞS", title="song", artist_sort="CHVRCHES")
+ self.assertIn(("CHVRCHΞS", ["song"]), lyrics.search_pairs(item))
+ self.assertIn(("CHVRCHES", ["song"]), lyrics.search_pairs(item))
# Make sure that the original artist name is still the first entry
- self.assertEqual(('CHVRCHΞS', ['song']),
- list(lyrics.search_pairs(item))[0])
+ self.assertEqual(
+ ("CHVRCHΞS", ["song"]), list(lyrics.search_pairs(item))[0]
+ )
- item = Item(artist='横山克', title='song',
- artist_sort='Masaru Yokoyama')
- self.assertIn(('横山克', ['song']),
- lyrics.search_pairs(item))
- self.assertIn(('Masaru Yokoyama', ['song']),
- lyrics.search_pairs(item))
+ item = Item(artist="横山克", title="song", artist_sort="Masaru Yokoyama")
+ self.assertIn(("横山克", ["song"]), lyrics.search_pairs(item))
+ self.assertIn(("Masaru Yokoyama", ["song"]), lyrics.search_pairs(item))
# Make sure that the original artist name is still the first entry
- self.assertEqual(('横山克', ['song']),
- list(lyrics.search_pairs(item))[0])
+ self.assertEqual(("横山克", ["song"]), list(lyrics.search_pairs(item))[0])
def test_search_pairs_multi_titles(self):
- item = Item(title='1 / 2', artist='A')
- self.assertIn(('A', ['1 / 2']), lyrics.search_pairs(item))
- self.assertIn(('A', ['1', '2']), lyrics.search_pairs(item))
+ item = Item(title="1 / 2", artist="A")
+ self.assertIn(("A", ["1 / 2"]), lyrics.search_pairs(item))
+ self.assertIn(("A", ["1", "2"]), lyrics.search_pairs(item))
- item = Item(title='1/2', artist='A')
- self.assertIn(('A', ['1/2']), lyrics.search_pairs(item))
- self.assertIn(('A', ['1', '2']), lyrics.search_pairs(item))
+ item = Item(title="1/2", artist="A")
+ self.assertIn(("A", ["1/2"]), lyrics.search_pairs(item))
+ self.assertIn(("A", ["1", "2"]), lyrics.search_pairs(item))
def test_search_pairs_titles(self):
- item = Item(title='Song (live)', artist='A')
- self.assertIn(('A', ['Song']), lyrics.search_pairs(item))
- self.assertIn(('A', ['Song (live)']), lyrics.search_pairs(item))
+ item = Item(title="Song (live)", artist="A")
+ self.assertIn(("A", ["Song"]), lyrics.search_pairs(item))
+ self.assertIn(("A", ["Song (live)"]), lyrics.search_pairs(item))
- item = Item(title='Song (live) (new)', artist='A')
- self.assertIn(('A', ['Song']), lyrics.search_pairs(item))
- self.assertIn(('A', ['Song (live) (new)']), lyrics.search_pairs(item))
+ item = Item(title="Song (live) (new)", artist="A")
+ self.assertIn(("A", ["Song"]), lyrics.search_pairs(item))
+ self.assertIn(("A", ["Song (live) (new)"]), lyrics.search_pairs(item))
- item = Item(title='Song (live (new))', artist='A')
- self.assertIn(('A', ['Song']), lyrics.search_pairs(item))
- self.assertIn(('A', ['Song (live (new))']), lyrics.search_pairs(item))
+ item = Item(title="Song (live (new))", artist="A")
+ self.assertIn(("A", ["Song"]), lyrics.search_pairs(item))
+ self.assertIn(("A", ["Song (live (new))"]), lyrics.search_pairs(item))
- item = Item(title='Song ft. B', artist='A')
- self.assertIn(('A', ['Song']), lyrics.search_pairs(item))
- self.assertIn(('A', ['Song ft. B']), lyrics.search_pairs(item))
+ item = Item(title="Song ft. B", artist="A")
+ self.assertIn(("A", ["Song"]), lyrics.search_pairs(item))
+ self.assertIn(("A", ["Song ft. B"]), lyrics.search_pairs(item))
- item = Item(title='Song featuring B', artist='A')
- self.assertIn(('A', ['Song']), lyrics.search_pairs(item))
- self.assertIn(('A', ['Song featuring B']), lyrics.search_pairs(item))
+ item = Item(title="Song featuring B", artist="A")
+ self.assertIn(("A", ["Song"]), lyrics.search_pairs(item))
+ self.assertIn(("A", ["Song featuring B"]), lyrics.search_pairs(item))
- item = Item(title='Song and B', artist='A')
- self.assertNotIn(('A', ['Song']), lyrics.search_pairs(item))
- self.assertIn(('A', ['Song and B']), lyrics.search_pairs(item))
+ item = Item(title="Song and B", artist="A")
+ self.assertNotIn(("A", ["Song"]), lyrics.search_pairs(item))
+ self.assertIn(("A", ["Song and B"]), lyrics.search_pairs(item))
- item = Item(title='Song: B', artist='A')
- self.assertIn(('A', ['Song']), lyrics.search_pairs(item))
- self.assertIn(('A', ['Song: B']), lyrics.search_pairs(item))
+ item = Item(title="Song: B", artist="A")
+ self.assertIn(("A", ["Song"]), lyrics.search_pairs(item))
+ self.assertIn(("A", ["Song: B"]), lyrics.search_pairs(item))
def test_remove_credits(self):
self.assertEqual(
- lyrics.remove_credits("""It's close to midnight
- Lyrics brought by example.com"""),
- "It's close to midnight"
+ lyrics.remove_credits(
+ """It's close to midnight
+ Lyrics brought by example.com"""
+ ),
+ "It's close to midnight",
)
self.assertEqual(
- lyrics.remove_credits("""Lyrics brought by example.com"""),
- ""
+ lyrics.remove_credits("""Lyrics brought by example.com"""), ""
)
# don't remove 2nd verse for the only reason it contains 'lyrics' word
@@ -168,16 +151,17 @@ def test_remove_credits(self):
self.assertEqual(lyrics.remove_credits(text), text)
def test_is_lyrics(self):
- texts = ['LyricsMania.com - Copyright (c) 2013 - All Rights Reserved']
- texts += ["""All material found on this site is property\n
- of mywickedsongtext brand"""]
+ texts = ["LyricsMania.com - Copyright (c) 2013 - All Rights Reserved"]
+ texts += [
+ """All material found on this site is property\n
+ of mywickedsongtext brand"""
+ ]
for t in texts:
self.assertFalse(google.is_lyrics(t))
def test_slugify(self):
text = "http://site.com/\xe7afe-au_lait(boisson)"
- self.assertEqual(google.slugify(text),
- 'http://site.com/cafe_au_lait')
+ self.assertEqual(google.slugify(text), "http://site.com/cafe_au_lait")
def test_scrape_strip_cruft(self):
text = """
@@ -186,42 +170,43 @@ def test_scrape_strip_cruft(self):
two !
"""
- self.assertEqual(lyrics._scrape_strip_cruft(text, True),
- "one\ntwo !\n\nfour")
+ self.assertEqual(
+ lyrics._scrape_strip_cruft(text, True), "one\ntwo !\n\nfour"
+ )
def test_scrape_strip_scripts(self):
text = """foobaz"""
- self.assertEqual(lyrics._scrape_strip_cruft(text, True),
- "foobaz")
+ self.assertEqual(lyrics._scrape_strip_cruft(text, True), "foobaz")
def test_scrape_strip_tag_in_comment(self):
text = """fooqux"""
- self.assertEqual(lyrics._scrape_strip_cruft(text, True),
- "fooqux")
+ self.assertEqual(lyrics._scrape_strip_cruft(text, True), "fooqux")
def test_scrape_merge_paragraphs(self):
text = "one two
three"
- self.assertEqual(lyrics._scrape_merge_paragraphs(text),
- "one\ntwo\nthree")
+ self.assertEqual(
+ lyrics._scrape_merge_paragraphs(text), "one\ntwo\nthree"
+ )
def test_missing_lyrics(self):
- self.assertFalse(google.is_lyrics(LYRICS_TEXTS['missing_texts']))
+ self.assertFalse(google.is_lyrics(LYRICS_TEXTS["missing_texts"]))
def url_to_filename(url):
- url = re.sub(r'https?://|www.', '', url)
- url = re.sub(r'.html', '', url)
- fn = "".join(x for x in url if (x.isalnum() or x == '/'))
- fn = fn.split('/')
- fn = os.path.join(LYRICS_ROOT_DIR,
- bytestring_path(fn[0]),
- bytestring_path(fn[-1] + '.txt'))
+ url = re.sub(r"https?://|www.", "", url)
+ url = re.sub(r".html", "", url)
+ fn = "".join(x for x in url if (x.isalnum() or x == "/"))
+ fn = fn.split("/")
+ fn = os.path.join(
+ LYRICS_ROOT_DIR,
+ bytestring_path(fn[0]),
+ bytestring_path(fn[-1] + ".txt"),
+ )
return fn
class MockFetchUrl:
-
- def __init__(self, pathval='fetched_path'):
+ def __init__(self, pathval="fetched_path"):
self.pathval = pathval
self.fetched = None
@@ -253,81 +238,106 @@ def assertLyricsContentOk(self, title, text, msg=""): # noqa: N802
self.fail(f"{details} : {msg}")
-LYRICS_ROOT_DIR = os.path.join(_common.RSRC, b'lyrics')
-yaml_path = os.path.join(_common.RSRC, b'lyricstext.yaml')
+LYRICS_ROOT_DIR = os.path.join(_common.RSRC, b"lyrics")
+yaml_path = os.path.join(_common.RSRC, b"lyricstext.yaml")
LYRICS_TEXTS = confuse.load_yaml(yaml_path)
class LyricsGoogleBaseTest(unittest.TestCase):
-
def setUp(self):
"""Set up configuration."""
try:
- __import__('bs4')
+ __import__("bs4")
except ImportError:
- self.skipTest('Beautiful Soup 4 not available')
+ self.skipTest("Beautiful Soup 4 not available")
class LyricsPluginSourcesTest(LyricsGoogleBaseTest, LyricsAssertions):
"""Check that beets google custom search engine sources are correctly
- scraped.
+ scraped.
"""
- DEFAULT_SONG = dict(artist='The Beatles', title='Lady Madonna')
+ DEFAULT_SONG = dict(artist="The Beatles", title="Lady Madonna")
DEFAULT_SOURCES = [
# dict(artist=u'Santana', title=u'Black magic woman',
# backend=lyrics.MusiXmatch),
- dict(DEFAULT_SONG, backend=lyrics.Genius,
- # GitHub actions is on some form of Cloudflare blacklist.
- skip=os.environ.get('GITHUB_ACTIONS') == 'true'),
- dict(artist='Boy In Space', title='u n eye',
- backend=lyrics.Tekstowo),
+ dict(
+ DEFAULT_SONG,
+ backend=lyrics.Genius,
+ # GitHub actions is on some form of Cloudflare blacklist.
+ skip=os.environ.get("GITHUB_ACTIONS") == "true",
+ ),
+ dict(artist="Boy In Space", title="u n eye", backend=lyrics.Tekstowo),
]
GOOGLE_SOURCES = [
- dict(DEFAULT_SONG,
- url='http://www.absolutelyrics.com',
- path='/lyrics/view/the_beatles/lady_madonna'),
- dict(DEFAULT_SONG,
- url='http://www.azlyrics.com',
- path='/lyrics/beatles/ladymadonna.html',
- # AZLyrics returns a 403 on GitHub actions.
- skip=os.environ.get('GITHUB_ACTIONS') == 'true'),
- dict(DEFAULT_SONG,
- url='http://www.chartlyrics.com',
- path='/_LsLsZ7P4EK-F-LD4dJgDQ/Lady+Madonna.aspx'),
+ dict(
+ DEFAULT_SONG,
+ url="http://www.absolutelyrics.com",
+ path="/lyrics/view/the_beatles/lady_madonna",
+ ),
+ dict(
+ DEFAULT_SONG,
+ url="http://www.azlyrics.com",
+ path="/lyrics/beatles/ladymadonna.html",
+ # AZLyrics returns a 403 on GitHub actions.
+ skip=os.environ.get("GITHUB_ACTIONS") == "true",
+ ),
+ dict(
+ DEFAULT_SONG,
+ url="http://www.chartlyrics.com",
+ path="/_LsLsZ7P4EK-F-LD4dJgDQ/Lady+Madonna.aspx",
+ ),
# dict(DEFAULT_SONG,
# url=u'http://www.elyricsworld.com',
# path=u'/lady_madonna_lyrics_beatles.html'),
- dict(url='http://www.lacoccinelle.net',
- artist='Jacques Brel', title="Amsterdam",
- path='/paroles-officielles/275679.html'),
- dict(DEFAULT_SONG,
- url='http://letras.mus.br/', path='the-beatles/275/'),
- dict(DEFAULT_SONG,
- url='http://www.lyricsmania.com/',
- path='lady_madonna_lyrics_the_beatles.html'),
- dict(DEFAULT_SONG,
- url='http://www.lyricsmode.com',
- path='/lyrics/b/beatles/lady_madonna.html'),
- dict(url='http://www.lyricsontop.com',
- artist='Amy Winehouse', title="Jazz'n'blues",
- path='/amy-winehouse-songs/jazz-n-blues-lyrics.html'),
+ dict(
+ url="http://www.lacoccinelle.net",
+ artist="Jacques Brel",
+ title="Amsterdam",
+ path="/paroles-officielles/275679.html",
+ ),
+ dict(
+ DEFAULT_SONG, url="http://letras.mus.br/", path="the-beatles/275/"
+ ),
+ dict(
+ DEFAULT_SONG,
+ url="http://www.lyricsmania.com/",
+ path="lady_madonna_lyrics_the_beatles.html",
+ ),
+ dict(
+ DEFAULT_SONG,
+ url="http://www.lyricsmode.com",
+ path="/lyrics/b/beatles/lady_madonna.html",
+ ),
+ dict(
+ url="http://www.lyricsontop.com",
+ artist="Amy Winehouse",
+ title="Jazz'n'blues",
+ path="/amy-winehouse-songs/jazz-n-blues-lyrics.html",
+ ),
# dict(DEFAULT_SONG,
# url='http://www.metrolyrics.com/',
# path='lady-madonna-lyrics-beatles.html'),
# dict(url='http://www.musica.com/', path='letras.asp?letra=2738',
# artist=u'Santana', title=u'Black magic woman'),
- dict(url='http://www.paroles.net/',
- artist='Lilly Wood & the prick', title="Hey it's ok",
- path='lilly-wood-the-prick/paroles-hey-it-s-ok'),
- dict(DEFAULT_SONG,
- url='http://www.songlyrics.com',
- path='/the-beatles/lady-madonna-lyrics'),
- dict(DEFAULT_SONG,
- url='http://www.sweetslyrics.com',
- path='/761696.The%20Beatles%20-%20Lady%20Madonna.html')
+ dict(
+ url="http://www.paroles.net/",
+ artist="Lilly Wood & the prick",
+ title="Hey it's ok",
+ path="lilly-wood-the-prick/paroles-hey-it-s-ok",
+ ),
+ dict(
+ DEFAULT_SONG,
+ url="http://www.songlyrics.com",
+ path="/the-beatles/lady-madonna-lyrics",
+ ),
+ dict(
+ DEFAULT_SONG,
+ url="http://www.sweetslyrics.com",
+ path="/761696.The%20Beatles%20-%20Lady%20Madonna.html",
+ ),
]
def setUp(self):
@@ -335,8 +345,9 @@ def setUp(self):
self.plugin = lyrics.LyricsPlugin()
@unittest.skipUnless(
- os.environ.get('INTEGRATION_TEST', '0') == '1',
- 'integration testing not enabled')
+ os.environ.get("INTEGRATION_TEST", "0") == "1",
+ "integration testing not enabled",
+ )
def test_backend_sources_ok(self):
"""Test default backends with songs known to exist in respective
databases.
@@ -344,78 +355,93 @@ def test_backend_sources_ok(self):
# Don't test any sources marked as skipped.
sources = [s for s in self.DEFAULT_SOURCES if not s.get("skip", False)]
for s in sources:
- with self.subTest(s['backend'].__name__):
- backend = s['backend'](self.plugin.config, self.plugin._log)
- res = backend.fetch(s['artist'], s['title'])
- self.assertLyricsContentOk(s['title'], res)
+ with self.subTest(s["backend"].__name__):
+ backend = s["backend"](self.plugin.config, self.plugin._log)
+ res = backend.fetch(s["artist"], s["title"])
+ self.assertLyricsContentOk(s["title"], res)
@unittest.skipUnless(
- os.environ.get('INTEGRATION_TEST', '0') == '1',
- 'integration testing not enabled')
+ os.environ.get("INTEGRATION_TEST", "0") == "1",
+ "integration testing not enabled",
+ )
def test_google_sources_ok(self):
"""Test if lyrics present on websites registered in beets google custom
- search engine are correctly scraped.
+ search engine are correctly scraped.
"""
# Don't test any sources marked as skipped.
sources = [s for s in self.GOOGLE_SOURCES if not s.get("skip", False)]
for s in sources:
- url = s['url'] + s['path']
- res = lyrics.scrape_lyrics_from_html(
- raw_backend.fetch_url(url))
+ url = s["url"] + s["path"]
+ res = lyrics.scrape_lyrics_from_html(raw_backend.fetch_url(url))
self.assertTrue(google.is_lyrics(res), url)
- self.assertLyricsContentOk(s['title'], res, url)
+ self.assertLyricsContentOk(s["title"], res, url)
class LyricsGooglePluginMachineryTest(LyricsGoogleBaseTest, LyricsAssertions):
- """Test scraping heuristics on a fake html page.
- """
+ """Test scraping heuristics on a fake html page."""
- source = dict(url='http://www.example.com', artist='John Doe',
- title='Beets song', path='/lyrics/beetssong')
+ source = dict(
+ url="http://www.example.com",
+ artist="John Doe",
+ title="Beets song",
+ path="/lyrics/beetssong",
+ )
def setUp(self):
"""Set up configuration"""
LyricsGoogleBaseTest.setUp(self)
self.plugin = lyrics.LyricsPlugin()
- @patch.object(lyrics.Backend, 'fetch_url', MockFetchUrl())
+ @patch.object(lyrics.Backend, "fetch_url", MockFetchUrl())
def test_mocked_source_ok(self):
"""Test that lyrics of the mocked page are correctly scraped"""
- url = self.source['url'] + self.source['path']
+ url = self.source["url"] + self.source["path"]
res = lyrics.scrape_lyrics_from_html(raw_backend.fetch_url(url))
self.assertTrue(google.is_lyrics(res), url)
- self.assertLyricsContentOk(self.source['title'], res, url)
+ self.assertLyricsContentOk(self.source["title"], res, url)
- @patch.object(lyrics.Backend, 'fetch_url', MockFetchUrl())
+ @patch.object(lyrics.Backend, "fetch_url", MockFetchUrl())
def test_is_page_candidate_exact_match(self):
"""Test matching html page title with song infos -- when song infos are
- present in the title.
+ present in the title.
"""
- from bs4 import SoupStrainer, BeautifulSoup
+ from bs4 import BeautifulSoup, SoupStrainer
+
s = self.source
- url = str(s['url'] + s['path'])
+ url = str(s["url"] + s["path"])
html = raw_backend.fetch_url(url)
- soup = BeautifulSoup(html, "html.parser",
- parse_only=SoupStrainer('title'))
+ soup = BeautifulSoup(
+ html, "html.parser", parse_only=SoupStrainer("title")
+ )
self.assertEqual(
- google.is_page_candidate(url, soup.title.string,
- s['title'], s['artist']), True, url)
+ google.is_page_candidate(
+ url, soup.title.string, s["title"], s["artist"]
+ ),
+ True,
+ url,
+ )
def test_is_page_candidate_fuzzy_match(self):
"""Test matching html page title with song infos -- when song infos are
- not present in the title.
+ not present in the title.
"""
s = self.source
- url = s['url'] + s['path']
- url_title = 'example.com | Beats song by John doe'
+ url = s["url"] + s["path"]
+ url_title = "example.com | Beats song by John doe"
# very small diffs (typo) are ok eg 'beats' vs 'beets' with same artist
- self.assertEqual(google.is_page_candidate(url, url_title, s['title'],
- s['artist']), True, url)
+ self.assertEqual(
+ google.is_page_candidate(url, url_title, s["title"], s["artist"]),
+ True,
+ url,
+ )
# reject different title
- url_title = 'example.com | seets bong lyrics by John doe'
- self.assertEqual(google.is_page_candidate(url, url_title, s['title'],
- s['artist']), False, url)
+ url_title = "example.com | seets bong lyrics by John doe"
+ self.assertEqual(
+ google.is_page_candidate(url, url_title, s["title"], s["artist"]),
+ False,
+ url,
+ )
def test_is_page_candidate_special_chars(self):
"""Ensure that `is_page_candidate` doesn't crash when the artist
@@ -423,21 +449,22 @@ def test_is_page_candidate_special_chars(self):
"""
# https://github.com/beetbox/beets/issues/1673
s = self.source
- url = s['url'] + s['path']
- url_title = 'foo'
+ url = s["url"] + s["path"]
+ url_title = "foo"
- google.is_page_candidate(url, url_title, s['title'], 'Sunn O)))')
+ google.is_page_candidate(url, url_title, s["title"], "Sunn O)))")
# test Genius backend
+
class GeniusBaseTest(unittest.TestCase):
def setUp(self):
"""Set up configuration."""
try:
- __import__('bs4')
+ __import__("bs4")
except ImportError:
- self.skipTest('Beautiful Soup 4 not available')
+ self.skipTest("Beautiful Soup 4 not available")
class GeniusScrapeLyricsFromHtmlTest(GeniusBaseTest):
@@ -454,13 +481,13 @@ def test_no_lyrics_div(self):
"""
# https://github.com/beetbox/beets/issues/3535
# expected return value None
- url = 'https://genius.com/sample'
+ url = "https://genius.com/sample"
mock = MockFetchUrl()
self.assertEqual(genius._scrape_lyrics_from_html(mock(url)), None)
def test_good_lyrics(self):
"""Ensure we are able to scrape a page with lyrics"""
- url = 'https://genius.com/Ttng-chinchilla-lyrics'
+ url = "https://genius.com/Ttng-chinchilla-lyrics"
mock = MockFetchUrl()
self.assertIsNotNone(genius._scrape_lyrics_from_html(mock(url)))
@@ -475,12 +502,14 @@ def setUp(self):
GeniusBaseTest.setUp(self)
self.plugin = lyrics.LyricsPlugin()
- @patch.object(lyrics.Genius, '_scrape_lyrics_from_html')
- @patch.object(lyrics.Backend, 'fetch_url', return_value=True)
+ @patch.object(lyrics.Genius, "_scrape_lyrics_from_html")
+ @patch.object(lyrics.Backend, "fetch_url", return_value=True)
def test_json(self, mock_fetch_url, mock_scrape):
"""Ensure we're finding artist matches"""
with patch.object(
- lyrics.Genius, '_search', return_value={
+ lyrics.Genius,
+ "_search",
+ return_value={
"response": {
"hits": [
{
@@ -488,51 +517,50 @@ def test_json(self, mock_fetch_url, mock_scrape):
"primary_artist": {
"name": "\u200Bblackbear",
},
- "url": "blackbear_url"
+ "url": "blackbear_url",
}
},
{
"result": {
- "primary_artist": {
- "name": "El\u002Dp"
- },
- "url": "El-p_url"
+ "primary_artist": {"name": "El\u002Dp"},
+ "url": "El-p_url",
}
- }
+ },
]
}
- }
+ },
) as mock_json:
# genius uses zero-width-spaces (\u200B) for lowercase
# artists so we make sure we can match those
- self.assertIsNotNone(genius.fetch('blackbear', 'Idfc'))
+ self.assertIsNotNone(genius.fetch("blackbear", "Idfc"))
mock_fetch_url.assert_called_once_with("blackbear_url")
mock_scrape.assert_called_once_with(True)
# genius uses the hyphen minus (\u002D) as their dash
- self.assertIsNotNone(genius.fetch('El-p', 'Idfc'))
- mock_fetch_url.assert_called_with('El-p_url')
+ self.assertIsNotNone(genius.fetch("El-p", "Idfc"))
+ mock_fetch_url.assert_called_with("El-p_url")
mock_scrape.assert_called_with(True)
# test no matching artist
- self.assertIsNone(genius.fetch('doesntexist', 'none'))
+ self.assertIsNone(genius.fetch("doesntexist", "none"))
# test invalid json
mock_json.return_value = None
- self.assertIsNone(genius.fetch('blackbear', 'Idfc'))
+ self.assertIsNone(genius.fetch("blackbear", "Idfc"))
# TODO: add integration test hitting real api
# test Tekstowo
+
class TekstowoBaseTest(unittest.TestCase):
def setUp(self):
"""Set up configuration."""
try:
- __import__('bs4')
+ __import__("bs4")
except ImportError:
- self.skipTest('Beautiful Soup 4 not available')
+ self.skipTest("Beautiful Soup 4 not available")
class TekstowoExtractLyricsTest(TekstowoBaseTest):
@@ -546,32 +574,45 @@ def setUp(self):
def test_good_lyrics(self):
"""Ensure we are able to scrape a page with lyrics"""
- url = 'https://www.tekstowo.pl/piosenka,24kgoldn,city_of_angels_1.html'
+ url = "https://www.tekstowo.pl/piosenka,24kgoldn,city_of_angels_1.html"
mock = MockFetchUrl()
- self.assertIsNotNone(tekstowo.extract_lyrics(mock(url),
- '24kGoldn', 'City of Angels'))
+ self.assertIsNotNone(
+ tekstowo.extract_lyrics(mock(url), "24kGoldn", "City of Angels")
+ )
def test_no_lyrics(self):
"""Ensure we don't crash when the scraping the html for a Tekstowo page
doesn't contain lyrics
"""
- url = 'https://www.tekstowo.pl/piosenka,beethoven,' \
- 'beethoven_piano_sonata_17_tempest_the_3rd_movement.html'
+ url = (
+ "https://www.tekstowo.pl/piosenka,beethoven,"
+ "beethoven_piano_sonata_17_tempest_the_3rd_movement.html"
+ )
mock = MockFetchUrl()
- self.assertEqual(tekstowo.extract_lyrics(mock(url), 'Beethoven',
- 'Beethoven Piano Sonata 17'
- 'Tempest The 3rd Movement'),
- None)
+ self.assertEqual(
+ tekstowo.extract_lyrics(
+ mock(url),
+ "Beethoven",
+ "Beethoven Piano Sonata 17" "Tempest The 3rd Movement",
+ ),
+ None,
+ )
def test_song_no_match(self):
"""Ensure we return None when a song does not match the search query"""
# https://github.com/beetbox/beets/issues/4406
# expected return value None
- url = 'https://www.tekstowo.pl/piosenka,bailey_bigger' \
- ',black_eyed_susan.html'
+ url = (
+ "https://www.tekstowo.pl/piosenka,bailey_bigger"
+ ",black_eyed_susan.html"
+ )
mock = MockFetchUrl()
- self.assertEqual(tekstowo.extract_lyrics(mock(url), 'Kelly Bailey',
- 'Black Mesa Inbound'), None)
+ self.assertEqual(
+ tekstowo.extract_lyrics(
+ mock(url), "Kelly Bailey", "Black Mesa Inbound"
+ ),
+ None,
+ )
class TekstowoParseSearchResultsTest(TekstowoBaseTest):
@@ -584,17 +625,23 @@ def setUp(self):
def test_multiple_results(self):
"""Ensure we are able to scrape a page with multiple search results"""
- url = 'https://www.tekstowo.pl/szukaj,wykonawca,juice+wrld' \
- ',tytul,lucid+dreams.html'
+ url = (
+ "https://www.tekstowo.pl/szukaj,wykonawca,juice+wrld"
+ ",tytul,lucid+dreams.html"
+ )
mock = MockFetchUrl()
- self.assertEqual(tekstowo.parse_search_results(mock(url)),
- 'http://www.tekstowo.pl/piosenka,juice_wrld,'
- 'lucid_dreams__remix__ft__lil_uzi_vert.html')
+ self.assertEqual(
+ tekstowo.parse_search_results(mock(url)),
+ "http://www.tekstowo.pl/piosenka,juice_wrld,"
+ "lucid_dreams__remix__ft__lil_uzi_vert.html",
+ )
def test_no_results(self):
"""Ensure we are able to scrape a page with no search results"""
- url = 'https://www.tekstowo.pl/szukaj,wykonawca,' \
- 'agfdgja,tytul,agfdgafg.html'
+ url = (
+ "https://www.tekstowo.pl/szukaj,wykonawca,"
+ "agfdgja,tytul,agfdgafg.html"
+ )
mock = MockFetchUrl()
self.assertEqual(tekstowo.parse_search_results(mock(url)), None)
@@ -609,53 +656,56 @@ def setUp(self):
tekstowo.config = self.plugin.config
@unittest.skipUnless(
- os.environ.get('INTEGRATION_TEST', '0') == '1',
- 'integration testing not enabled')
+ os.environ.get("INTEGRATION_TEST", "0") == "1",
+ "integration testing not enabled",
+ )
def test_normal(self):
"""Ensure we can fetch a song's lyrics in the ordinary case"""
- lyrics = tekstowo.fetch('Boy in Space', 'u n eye')
- self.assertLyricsContentOk('u n eye', lyrics)
+ lyrics = tekstowo.fetch("Boy in Space", "u n eye")
+ self.assertLyricsContentOk("u n eye", lyrics)
@unittest.skipUnless(
- os.environ.get('INTEGRATION_TEST', '0') == '1',
- 'integration testing not enabled')
+ os.environ.get("INTEGRATION_TEST", "0") == "1",
+ "integration testing not enabled",
+ )
def test_no_matching_results(self):
"""Ensure we fetch nothing if there are search results
returned but no matches"""
# https://github.com/beetbox/beets/issues/4406
# expected return value None
- lyrics = tekstowo.fetch('Kelly Bailey', 'Black Mesa Inbound')
+ lyrics = tekstowo.fetch("Kelly Bailey", "Black Mesa Inbound")
self.assertEqual(lyrics, None)
# test utilities
-class SlugTests(unittest.TestCase):
+class SlugTests(unittest.TestCase):
def test_slug(self):
# plain ascii passthrough
text = "test"
- self.assertEqual(lyrics.slug(text), 'test')
+ self.assertEqual(lyrics.slug(text), "test")
# german unicode and capitals
text = "Mørdag"
- self.assertEqual(lyrics.slug(text), 'mordag')
+ self.assertEqual(lyrics.slug(text), "mordag")
# more accents and quotes
text = "l'été c'est fait pour jouer"
- self.assertEqual(lyrics.slug(text), 'l-ete-c-est-fait-pour-jouer')
+ self.assertEqual(lyrics.slug(text), "l-ete-c-est-fait-pour-jouer")
# accents, parens and spaces
text = "\xe7afe au lait (boisson)"
- self.assertEqual(lyrics.slug(text), 'cafe-au-lait-boisson')
+ self.assertEqual(lyrics.slug(text), "cafe-au-lait-boisson")
text = "Multiple spaces -- and symbols! -- merged"
- self.assertEqual(lyrics.slug(text),
- 'multiple-spaces-and-symbols-merged')
+ self.assertEqual(
+ lyrics.slug(text), "multiple-spaces-and-symbols-merged"
+ )
text = "\u200Bno-width-space"
- self.assertEqual(lyrics.slug(text), 'no-width-space')
+ self.assertEqual(lyrics.slug(text), "no-width-space")
# variations of dashes should get standardized
- dashes = ['\u200D', '\u2010']
+ dashes = ["\u200D", "\u2010"]
for dash1, dash2 in itertools.combinations(dashes, 2):
self.assertEqual(lyrics.slug(dash1), lyrics.slug(dash2))
@@ -664,5 +714,5 @@ def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/plugins/test_mbsubmit.py b/test/plugins/test_mbsubmit.py
index d8654982d6..6f9c81c047 100644
--- a/test/plugins/test_mbsubmit.py
+++ b/test/plugins/test_mbsubmit.py
@@ -14,16 +14,17 @@
import unittest
-from test.helper import capture_stdout, control_stdin, TestHelper
-from test.test_importer import ImportHelper, AutotagStub
+from test.helper import TestHelper, capture_stdout, control_stdin
+from test.test_importer import AutotagStub, ImportHelper
from test.test_ui_importer import TerminalImportSessionSetup
-class MBSubmitPluginTest(TerminalImportSessionSetup, unittest.TestCase,
- ImportHelper, TestHelper):
+class MBSubmitPluginTest(
+ TerminalImportSessionSetup, unittest.TestCase, ImportHelper, TestHelper
+):
def setUp(self):
self.setup_beets()
- self.load_plugins('mbsubmit')
+ self.load_plugins("mbsubmit")
self._create_import_dir(2)
self._setup_import_session()
self.matcher = AutotagStub().install()
@@ -38,14 +39,16 @@ def test_print_tracks_output(self):
self.matcher.matching = AutotagStub.BAD
with capture_stdout() as output:
- with control_stdin('\n'.join(['p', 's'])):
+ with control_stdin("\n".join(["p", "s"])):
# Print tracks; Skip
self.importer.run()
# Manually build the string for comparing the output.
- tracklist = ('Print tracks? '
- '01. Tag Title 1 - Tag Artist (0:01)\n'
- '02. Tag Title 2 - Tag Artist (0:01)')
+ tracklist = (
+ "Print tracks? "
+ "01. Tag Title 1 - Tag Artist (0:01)\n"
+ "02. Tag Title 2 - Tag Artist (0:01)"
+ )
self.assertIn(tracklist, output.getvalue())
def test_print_tracks_output_as_tracks(self):
@@ -53,18 +56,18 @@ def test_print_tracks_output_as_tracks(self):
self.matcher.matching = AutotagStub.BAD
with capture_stdout() as output:
- with control_stdin('\n'.join(['t', 's', 'p', 's'])):
+ with control_stdin("\n".join(["t", "s", "p", "s"])):
# as Tracks; Skip; Print tracks; Skip
self.importer.run()
# Manually build the string for comparing the output.
- tracklist = ('Print tracks? '
- '02. Tag Title 2 - Tag Artist (0:01)')
+ tracklist = "Print tracks? " "02. Tag Title 2 - Tag Artist (0:01)"
self.assertIn(tracklist, output.getvalue())
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/plugins/test_mbsync.py b/test/plugins/test_mbsync.py
index 443aa044ed..d1c823f026 100644
--- a/test/plugins/test_mbsync.py
+++ b/test/plugins/test_mbsync.py
@@ -14,180 +14,184 @@
import unittest
+from test.helper import (
+ TestHelper,
+ capture_log,
+ generate_album_info,
+ generate_track_info,
+)
from unittest.mock import patch
-from test.helper import TestHelper, \
- generate_album_info, \
- generate_track_info, \
- capture_log
-
from beets import config
from beets.library import Item
class MbsyncCliTest(unittest.TestCase, TestHelper):
-
def setUp(self):
self.setup_beets()
- self.load_plugins('mbsync')
+ self.load_plugins("mbsync")
def tearDown(self):
self.unload_plugins()
self.teardown_beets()
- @patch('beets.autotag.mb.album_for_id')
- @patch('beets.autotag.mb.track_for_id')
+ @patch("beets.autotag.mb.album_for_id")
+ @patch("beets.autotag.mb.track_for_id")
def test_update_library(self, track_for_id, album_for_id):
- album_for_id.return_value = \
- generate_album_info(
- 'album id',
- [('track id', {'release_track_id': 'release track id'})]
- )
- track_for_id.return_value = \
- generate_track_info('singleton track id',
- {'title': 'singleton info'})
+ album_for_id.return_value = generate_album_info(
+ "album id", [("track id", {"release_track_id": "release track id"})]
+ )
+ track_for_id.return_value = generate_track_info(
+ "singleton track id", {"title": "singleton info"}
+ )
album_item = Item(
- album='old title',
- mb_albumid='81ae60d4-5b75-38df-903a-db2cfa51c2c6',
- mb_trackid='old track id',
- mb_releasetrackid='release track id',
- path=''
+ album="old title",
+ mb_albumid="81ae60d4-5b75-38df-903a-db2cfa51c2c6",
+ mb_trackid="old track id",
+ mb_releasetrackid="release track id",
+ path="",
)
album = self.lib.add_album([album_item])
item = Item(
- title='old title',
- mb_trackid='b8c2cf90-83f9-3b5f-8ccd-31fb866fcf37',
- path='',
+ title="old title",
+ mb_trackid="b8c2cf90-83f9-3b5f-8ccd-31fb866fcf37",
+ path="",
)
self.lib.add(item)
with capture_log() as logs:
- self.run_command('mbsync')
+ self.run_command("mbsync")
- self.assertIn('Sending event: albuminfo_received', logs)
- self.assertIn('Sending event: trackinfo_received', logs)
+ self.assertIn("Sending event: albuminfo_received", logs)
+ self.assertIn("Sending event: trackinfo_received", logs)
item.load()
- self.assertEqual(item.title, 'singleton info')
+ self.assertEqual(item.title, "singleton info")
album_item.load()
- self.assertEqual(album_item.title, 'track info')
- self.assertEqual(album_item.mb_trackid, 'track id')
+ self.assertEqual(album_item.title, "track info")
+ self.assertEqual(album_item.mb_trackid, "track id")
album.load()
- self.assertEqual(album.album, 'album info')
+ self.assertEqual(album.album, "album info")
def test_message_when_skipping(self):
- config['format_item'] = '$artist - $album - $title'
- config['format_album'] = '$albumartist - $album'
+ config["format_item"] = "$artist - $album - $title"
+ config["format_album"] = "$albumartist - $album"
# Test album with no mb_albumid.
# The default format for an album include $albumartist so
# set that here, too.
album_invalid = Item(
- albumartist='album info',
- album='album info',
- path=''
+ albumartist="album info", album="album info", path=""
)
self.lib.add_album([album_invalid])
# default format
- with capture_log('beets.mbsync') as logs:
- self.run_command('mbsync')
- e = 'mbsync: Skipping album with no mb_albumid: ' + \
- 'album info - album info'
+ with capture_log("beets.mbsync") as logs:
+ self.run_command("mbsync")
+ e = (
+ "mbsync: Skipping album with no mb_albumid: "
+ + "album info - album info"
+ )
self.assertEqual(e, logs[0])
# custom format
- with capture_log('beets.mbsync') as logs:
- self.run_command('mbsync', '-f', "'$album'")
+ with capture_log("beets.mbsync") as logs:
+ self.run_command("mbsync", "-f", "'$album'")
e = "mbsync: Skipping album with no mb_albumid: 'album info'"
self.assertEqual(e, logs[0])
# restore the config
- config['format_item'] = '$artist - $album - $title'
- config['format_album'] = '$albumartist - $album'
+ config["format_item"] = "$artist - $album - $title"
+ config["format_album"] = "$albumartist - $album"
# Test singleton with no mb_trackid.
# The default singleton format includes $artist and $album
# so we need to stub them here
item_invalid = Item(
- artist='album info',
- album='album info',
- title='old title',
- path='',
+ artist="album info",
+ album="album info",
+ title="old title",
+ path="",
)
self.lib.add(item_invalid)
# default format
- with capture_log('beets.mbsync') as logs:
- self.run_command('mbsync')
- e = 'mbsync: Skipping singleton with no mb_trackid: ' + \
- 'album info - album info - old title'
+ with capture_log("beets.mbsync") as logs:
+ self.run_command("mbsync")
+ e = (
+ "mbsync: Skipping singleton with no mb_trackid: "
+ + "album info - album info - old title"
+ )
self.assertEqual(e, logs[0])
# custom format
- with capture_log('beets.mbsync') as logs:
- self.run_command('mbsync', '-f', "'$title'")
+ with capture_log("beets.mbsync") as logs:
+ self.run_command("mbsync", "-f", "'$title'")
e = "mbsync: Skipping singleton with no mb_trackid: 'old title'"
self.assertEqual(e, logs[0])
def test_message_when_invalid(self):
- config['format_item'] = '$artist - $album - $title'
- config['format_album'] = '$albumartist - $album'
+ config["format_item"] = "$artist - $album - $title"
+ config["format_album"] = "$albumartist - $album"
# Test album with invalid mb_albumid.
# The default format for an album include $albumartist so
# set that here, too.
album_invalid = Item(
- albumartist='album info',
- album='album info',
- mb_albumid='a1b2c3d4',
- path=''
+ albumartist="album info",
+ album="album info",
+ mb_albumid="a1b2c3d4",
+ path="",
)
self.lib.add_album([album_invalid])
# default format
- with capture_log('beets.mbsync') as logs:
- self.run_command('mbsync')
- e = 'mbsync: Skipping album with invalid mb_albumid: ' + \
- 'album info - album info'
+ with capture_log("beets.mbsync") as logs:
+ self.run_command("mbsync")
+ e = (
+ "mbsync: Skipping album with invalid mb_albumid: "
+ + "album info - album info"
+ )
self.assertEqual(e, logs[0])
# custom format
- with capture_log('beets.mbsync') as logs:
- self.run_command('mbsync', '-f', "'$album'")
+ with capture_log("beets.mbsync") as logs:
+ self.run_command("mbsync", "-f", "'$album'")
e = "mbsync: Skipping album with invalid mb_albumid: 'album info'"
self.assertEqual(e, logs[0])
# restore the config
- config['format_item'] = '$artist - $album - $title'
- config['format_album'] = '$albumartist - $album'
+ config["format_item"] = "$artist - $album - $title"
+ config["format_album"] = "$albumartist - $album"
# Test singleton with invalid mb_trackid.
# The default singleton format includes $artist and $album
# so we need to stub them here
item_invalid = Item(
- artist='album info',
- album='album info',
- title='old title',
- mb_trackid='a1b2c3d4',
- path='',
+ artist="album info",
+ album="album info",
+ title="old title",
+ mb_trackid="a1b2c3d4",
+ path="",
)
self.lib.add(item_invalid)
# default format
- with capture_log('beets.mbsync') as logs:
- self.run_command('mbsync')
- e = 'mbsync: Skipping singleton with invalid mb_trackid: ' + \
- 'album info - album info - old title'
+ with capture_log("beets.mbsync") as logs:
+ self.run_command("mbsync")
+ e = (
+ "mbsync: Skipping singleton with invalid mb_trackid: "
+ + "album info - album info - old title"
+ )
self.assertEqual(e, logs[0])
# custom format
- with capture_log('beets.mbsync') as logs:
- self.run_command('mbsync', '-f', "'$title'")
+ with capture_log("beets.mbsync") as logs:
+ self.run_command("mbsync", "-f", "'$title'")
e = "mbsync: Skipping singleton with invalid mb_trackid: 'old title'"
self.assertEqual(e, logs[0])
@@ -195,5 +199,6 @@ def test_message_when_invalid(self):
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/plugins/test_mpdstats.py b/test/plugins/test_mpdstats.py
index 03ef8c7b5a..76761da102 100644
--- a/test/plugins/test_mpdstats.py
+++ b/test/plugins/test_mpdstats.py
@@ -14,25 +14,25 @@
import unittest
-from unittest.mock import Mock, patch, call, ANY
from test.helper import TestHelper
+from unittest.mock import ANY, Mock, call, patch
+from beets import util
from beets.library import Item
from beetsplug.mpdstats import MPDStats
-from beets import util
class MPDStatsTest(unittest.TestCase, TestHelper):
def setUp(self):
self.setup_beets()
- self.load_plugins('mpdstats')
+ self.load_plugins("mpdstats")
def tearDown(self):
self.teardown_beets()
self.unload_plugins()
def test_update_rating(self):
- item = Item(title='title', path='', id=1)
+ item = Item(title="title", path="", id=1)
item.add(self.lib)
log = Mock()
@@ -42,31 +42,40 @@ def test_update_rating(self):
self.assertFalse(mpdstats.update_rating(None, True))
def test_get_item(self):
- item_path = util.normpath('/foo/bar.flac')
- item = Item(title='title', path=item_path, id=1)
+ item_path = util.normpath("/foo/bar.flac")
+ item = Item(title="title", path=item_path, id=1)
item.add(self.lib)
log = Mock()
mpdstats = MPDStats(self.lib, log)
self.assertEqual(str(mpdstats.get_item(item_path)), str(item))
- self.assertIsNone(mpdstats.get_item('/some/non-existing/path'))
- self.assertIn('item not found:', log.info.call_args[0][0])
-
- FAKE_UNKNOWN_STATE = 'some-unknown-one'
- STATUSES = [{'state': FAKE_UNKNOWN_STATE},
- {'state': 'pause'},
- {'state': 'play', 'songid': 1, 'time': '0:1'},
- {'state': 'stop'}]
+ self.assertIsNone(mpdstats.get_item("/some/non-existing/path"))
+ self.assertIn("item not found:", log.info.call_args[0][0])
+
+ FAKE_UNKNOWN_STATE = "some-unknown-one"
+ STATUSES = [
+ {"state": FAKE_UNKNOWN_STATE},
+ {"state": "pause"},
+ {"state": "play", "songid": 1, "time": "0:1"},
+ {"state": "stop"},
+ ]
EVENTS = [["player"]] * (len(STATUSES) - 1) + [KeyboardInterrupt]
- item_path = util.normpath('/foo/bar.flac')
+ item_path = util.normpath("/foo/bar.flac")
songid = 1
- @patch("beetsplug.mpdstats.MPDClientWrapper", return_value=Mock(**{
- "events.side_effect": EVENTS, "status.side_effect": STATUSES,
- "currentsong.return_value": (item_path, songid)}))
+ @patch(
+ "beetsplug.mpdstats.MPDClientWrapper",
+ return_value=Mock(
+ **{
+ "events.side_effect": EVENTS,
+ "status.side_effect": STATUSES,
+ "currentsong.return_value": (item_path, songid),
+ }
+ ),
+ )
def test_run_mpdstats(self, mpd_mock):
- item = Item(title='title', path=self.item_path, id=1)
+ item = Item(title="title", path=self.item_path, id=1)
item.add(self.lib)
log = Mock()
@@ -75,14 +84,15 @@ def test_run_mpdstats(self, mpd_mock):
except KeyboardInterrupt:
pass
- log.debug.assert_has_calls(
- [call('unhandled status "{0}"', ANY)])
+ log.debug.assert_has_calls([call('unhandled status "{0}"', ANY)])
log.info.assert_has_calls(
- [call('pause'), call('playing {0}', ANY), call('stop')])
+ [call("pause"), call("playing {0}", ANY), call("stop")]
+ )
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/plugins/test_parentwork.py b/test/plugins/test_parentwork.py
index dbaa5976ca..3e88823fb9 100644
--- a/test/plugins/test_parentwork.py
+++ b/test/plugins/test_parentwork.py
@@ -23,43 +23,65 @@
from beets.library import Item
from beetsplug import parentwork
-
-work = {'work': {'id': '1',
- 'title': 'work',
- 'work-relation-list': [{'type': 'parts',
- 'direction': 'backward',
- 'work': {'id': '2'}}],
- 'artist-relation-list': [{'type': 'composer',
- 'artist': {'name':
- 'random composer',
- 'sort-name':
- 'composer, random'}}]}}
-dp_work = {'work': {'id': '2',
- 'title': 'directparentwork',
- 'work-relation-list': [{'type': 'parts',
- 'direction': 'backward',
- 'work': {'id': '3'}}],
- 'artist-relation-list': [{'type': 'composer',
- 'artist': {'name':
- 'random composer',
- 'sort-name':
- 'composer, random'
- }}]}}
-p_work = {'work': {'id': '3',
- 'title': 'parentwork',
- 'artist-relation-list': [{'type': 'composer',
- 'artist': {'name':
- 'random composer',
- 'sort-name':
- 'composer, random'}}]}}
+work = {
+ "work": {
+ "id": "1",
+ "title": "work",
+ "work-relation-list": [
+ {"type": "parts", "direction": "backward", "work": {"id": "2"}}
+ ],
+ "artist-relation-list": [
+ {
+ "type": "composer",
+ "artist": {
+ "name": "random composer",
+ "sort-name": "composer, random",
+ },
+ }
+ ],
+ }
+}
+dp_work = {
+ "work": {
+ "id": "2",
+ "title": "directparentwork",
+ "work-relation-list": [
+ {"type": "parts", "direction": "backward", "work": {"id": "3"}}
+ ],
+ "artist-relation-list": [
+ {
+ "type": "composer",
+ "artist": {
+ "name": "random composer",
+ "sort-name": "composer, random",
+ },
+ }
+ ],
+ }
+}
+p_work = {
+ "work": {
+ "id": "3",
+ "title": "parentwork",
+ "artist-relation-list": [
+ {
+ "type": "composer",
+ "artist": {
+ "name": "random composer",
+ "sort-name": "composer, random",
+ },
+ }
+ ],
+ }
+}
def mock_workid_response(mbid, includes):
- if mbid == '1':
+ if mbid == "1":
return work
- elif mbid == '2':
+ elif mbid == "2":
return dp_work
- elif mbid == '3':
+ elif mbid == "3":
return p_work
@@ -67,7 +89,7 @@ class ParentWorkIntegrationTest(unittest.TestCase, TestHelper):
def setUp(self):
"""Set up configuration"""
self.setup_beets()
- self.load_plugins('parentwork')
+ self.load_plugins("parentwork")
def tearDown(self):
self.unload_plugins()
@@ -75,76 +97,97 @@ def tearDown(self):
# test how it works with real musicbrainz data
@unittest.skipUnless(
- os.environ.get('INTEGRATION_TEST', '0') == '1',
- 'integration testing not enabled')
+ os.environ.get("INTEGRATION_TEST", "0") == "1",
+ "integration testing not enabled",
+ )
def test_normal_case_real(self):
- item = Item(path='/file',
- mb_workid='e27bda6e-531e-36d3-9cd7-b8ebc18e8c53',
- parentwork_workid_current='e27bda6e-531e-36d3-9cd7-\
- b8ebc18e8c53')
+ item = Item(
+ path="/file",
+ mb_workid="e27bda6e-531e-36d3-9cd7-b8ebc18e8c53",
+ parentwork_workid_current="e27bda6e-531e-36d3-9cd7-\
+ b8ebc18e8c53",
+ )
item.add(self.lib)
- self.run_command('parentwork')
+ self.run_command("parentwork")
item.load()
- self.assertEqual(item['mb_parentworkid'],
- '32c8943f-1b27-3a23-8660-4567f4847c94')
+ self.assertEqual(
+ item["mb_parentworkid"], "32c8943f-1b27-3a23-8660-4567f4847c94"
+ )
@unittest.skipUnless(
- os.environ.get('INTEGRATION_TEST', '0') == '1',
- 'integration testing not enabled')
+ os.environ.get("INTEGRATION_TEST", "0") == "1",
+ "integration testing not enabled",
+ )
def test_force_real(self):
- self.config['parentwork']['force'] = True
- item = Item(path='/file',
- mb_workid='e27bda6e-531e-36d3-9cd7-b8ebc18e8c53',
- mb_parentworkid='XXX',
- parentwork_workid_current='e27bda6e-531e-36d3-9cd7-\
- b8ebc18e8c53', parentwork='whatever')
+ self.config["parentwork"]["force"] = True
+ item = Item(
+ path="/file",
+ mb_workid="e27bda6e-531e-36d3-9cd7-b8ebc18e8c53",
+ mb_parentworkid="XXX",
+ parentwork_workid_current="e27bda6e-531e-36d3-9cd7-\
+ b8ebc18e8c53",
+ parentwork="whatever",
+ )
item.add(self.lib)
- self.run_command('parentwork')
+ self.run_command("parentwork")
item.load()
- self.assertEqual(item['mb_parentworkid'],
- '32c8943f-1b27-3a23-8660-4567f4847c94')
+ self.assertEqual(
+ item["mb_parentworkid"], "32c8943f-1b27-3a23-8660-4567f4847c94"
+ )
@unittest.skipUnless(
- os.environ.get('INTEGRATION_TEST', '0') == '1',
- 'integration testing not enabled')
+ os.environ.get("INTEGRATION_TEST", "0") == "1",
+ "integration testing not enabled",
+ )
def test_no_force_real(self):
- self.config['parentwork']['force'] = False
- item = Item(path='/file', mb_workid='e27bda6e-531e-36d3-9cd7-\
- b8ebc18e8c53', mb_parentworkid='XXX',
- parentwork_workid_current='e27bda6e-531e-36d3-9cd7-\
- b8ebc18e8c53', parentwork='whatever')
+ self.config["parentwork"]["force"] = False
+ item = Item(
+ path="/file",
+ mb_workid="e27bda6e-531e-36d3-9cd7-\
+ b8ebc18e8c53",
+ mb_parentworkid="XXX",
+ parentwork_workid_current="e27bda6e-531e-36d3-9cd7-\
+ b8ebc18e8c53",
+ parentwork="whatever",
+ )
item.add(self.lib)
- self.run_command('parentwork')
+ self.run_command("parentwork")
item.load()
- self.assertEqual(item['mb_parentworkid'], 'XXX')
+ self.assertEqual(item["mb_parentworkid"], "XXX")
# test different cases, still with Matthew Passion Ouverture or Mozart
# requiem
@unittest.skipUnless(
- os.environ.get('INTEGRATION_TEST', '0') == '1',
- 'integration testing not enabled')
+ os.environ.get("INTEGRATION_TEST", "0") == "1",
+ "integration testing not enabled",
+ )
def test_direct_parent_work_real(self):
- mb_workid = '2e4a3668-458d-3b2a-8be2-0b08e0d8243a'
- self.assertEqual('f04b42df-7251-4d86-a5ee-67cfa49580d1',
- parentwork.direct_parent_id(mb_workid)[0])
- self.assertEqual('45afb3b2-18ac-4187-bc72-beb1b1c194ba',
- parentwork.work_parent_id(mb_workid)[0])
+ mb_workid = "2e4a3668-458d-3b2a-8be2-0b08e0d8243a"
+ self.assertEqual(
+ "f04b42df-7251-4d86-a5ee-67cfa49580d1",
+ parentwork.direct_parent_id(mb_workid)[0],
+ )
+ self.assertEqual(
+ "45afb3b2-18ac-4187-bc72-beb1b1c194ba",
+ parentwork.work_parent_id(mb_workid)[0],
+ )
class ParentWorkTest(unittest.TestCase, TestHelper):
def setUp(self):
"""Set up configuration"""
self.setup_beets()
- self.load_plugins('parentwork')
- self.patcher = patch('musicbrainzngs.get_work_by_id',
- side_effect=mock_workid_response)
+ self.load_plugins("parentwork")
+ self.patcher = patch(
+ "musicbrainzngs.get_work_by_id", side_effect=mock_workid_response
+ )
self.patcher.start()
def tearDown(self):
@@ -153,44 +196,54 @@ def tearDown(self):
self.patcher.stop()
def test_normal_case(self):
- item = Item(path='/file', mb_workid='1', parentwork_workid_current='1')
+ item = Item(path="/file", mb_workid="1", parentwork_workid_current="1")
item.add(self.lib)
- self.run_command('parentwork')
+ self.run_command("parentwork")
item.load()
- self.assertEqual(item['mb_parentworkid'], '3')
+ self.assertEqual(item["mb_parentworkid"], "3")
def test_force(self):
- self.config['parentwork']['force'] = True
- item = Item(path='/file', mb_workid='1', mb_parentworkid='XXX',
- parentwork_workid_current='1', parentwork='parentwork')
+ self.config["parentwork"]["force"] = True
+ item = Item(
+ path="/file",
+ mb_workid="1",
+ mb_parentworkid="XXX",
+ parentwork_workid_current="1",
+ parentwork="parentwork",
+ )
item.add(self.lib)
- self.run_command('parentwork')
+ self.run_command("parentwork")
item.load()
- self.assertEqual(item['mb_parentworkid'], '3')
+ self.assertEqual(item["mb_parentworkid"], "3")
def test_no_force(self):
- self.config['parentwork']['force'] = False
- item = Item(path='/file', mb_workid='1', mb_parentworkid='XXX',
- parentwork_workid_current='1', parentwork='parentwork')
+ self.config["parentwork"]["force"] = False
+ item = Item(
+ path="/file",
+ mb_workid="1",
+ mb_parentworkid="XXX",
+ parentwork_workid_current="1",
+ parentwork="parentwork",
+ )
item.add(self.lib)
- self.run_command('parentwork')
+ self.run_command("parentwork")
item.load()
- self.assertEqual(item['mb_parentworkid'], 'XXX')
+ self.assertEqual(item["mb_parentworkid"], "XXX")
def test_direct_parent_work(self):
- self.assertEqual('2', parentwork.direct_parent_id('1')[0])
- self.assertEqual('3', parentwork.work_parent_id('1')[0])
+ self.assertEqual("2", parentwork.direct_parent_id("1")[0])
+ self.assertEqual("3", parentwork.work_parent_id("1")[0])
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/plugins/test_permissions.py b/test/plugins/test_permissions.py
index c84de5e977..d6175de5a7 100644
--- a/test/plugins/test_permissions.py
+++ b/test/plugins/test_permissions.py
@@ -4,24 +4,24 @@
import os
import platform
import unittest
-from unittest.mock import patch, Mock
-
-from test.helper import TestHelper
from test._common import touch
+from test.helper import TestHelper
+from unittest.mock import Mock, patch
+
from beets.util import displayable_path
-from beetsplug.permissions import (check_permissions,
- convert_perm,
- dirs_in_library)
+from beetsplug.permissions import (
+ check_permissions,
+ convert_perm,
+ dirs_in_library,
+)
class PermissionsPluginTest(unittest.TestCase, TestHelper):
def setUp(self):
self.setup_beets()
- self.load_plugins('permissions')
+ self.load_plugins("permissions")
- self.config['permissions'] = {
- 'file': '777',
- 'dir': '777'}
+ self.config["permissions"] = {"file": "777", "dir": "777"}
def tearDown(self):
self.teardown_beets()
@@ -31,7 +31,7 @@ def test_permissions_on_album_imported(self):
self.do_thing(True)
def test_permissions_on_item_imported(self):
- self.config['import']['singletons'] = True
+ self.config["import"]["singletons"] = True
self.do_thing(True)
@patch("os.chmod", Mock())
@@ -39,43 +39,50 @@ def test_failing_to_set_permissions(self):
self.do_thing(False)
def do_thing(self, expect_success):
- if platform.system() == 'Windows':
- self.skipTest('permissions not available on Windows')
+ if platform.system() == "Windows":
+ self.skipTest("permissions not available on Windows")
def get_stat(v):
- return os.stat(
- os.path.join(self.temp_dir, b'import', *v)).st_mode & 0o777
+ return (
+ os.stat(os.path.join(self.temp_dir, b"import", *v)).st_mode
+ & 0o777
+ )
+
self.importer = self.create_importer()
- typs = ['file', 'dir']
+ typs = ["file", "dir"]
- track_file = (b'album 0', b'track 0.mp3')
+ track_file = (b"album 0", b"track 0.mp3")
self.exp_perms = {
- True: {k: convert_perm(self.config['permissions'][k].get())
- for k in typs},
- False: {k: get_stat(v) for (k, v) in zip(typs, (track_file, ()))}
+ True: {
+ k: convert_perm(self.config["permissions"][k].get())
+ for k in typs
+ },
+ False: {k: get_stat(v) for (k, v) in zip(typs, (track_file, ()))},
}
self.importer.run()
item = self.lib.items().get()
- self.assertPerms(item.path, 'file', expect_success)
+ self.assertPerms(item.path, "file", expect_success)
for path in dirs_in_library(self.lib.directory, item.path):
- self.assertPerms(path, 'dir', expect_success)
+ self.assertPerms(path, "dir", expect_success)
def assertPerms(self, path, typ, expect_success): # noqa
- for x in [(True, self.exp_perms[expect_success][typ], '!='),
- (False, self.exp_perms[not expect_success][typ], '==')]:
- msg = '{} : {} {} {}'.format(
+ for x in [
+ (True, self.exp_perms[expect_success][typ], "!="),
+ (False, self.exp_perms[not expect_success][typ], "=="),
+ ]:
+ msg = "{} : {} {} {}".format(
displayable_path(path),
oct(os.stat(path).st_mode),
x[2],
- oct(x[1])
+ oct(x[1]),
)
self.assertEqual(x[0], check_permissions(path, x[1]), msg=msg)
def test_convert_perm_from_string(self):
- self.assertEqual(convert_perm('10'), 8)
+ self.assertEqual(convert_perm("10"), 8)
def test_convert_perm_from_int(self):
self.assertEqual(convert_perm(10), 8)
@@ -88,21 +95,22 @@ def test_failing_permissions_on_set_art(self):
self.do_set_art(False)
def do_set_art(self, expect_success):
- if platform.system() == 'Windows':
- self.skipTest('permissions not available on Windows')
+ if platform.system() == "Windows":
+ self.skipTest("permissions not available on Windows")
self.importer = self.create_importer()
self.importer.run()
album = self.lib.albums().get()
- artpath = os.path.join(self.temp_dir, b'cover.jpg')
+ artpath = os.path.join(self.temp_dir, b"cover.jpg")
touch(artpath)
album.set_art(artpath)
- self.assertEqual(expect_success,
- check_permissions(album.artpath, 0o777))
+ self.assertEqual(
+ expect_success, check_permissions(album.artpath, 0o777)
+ )
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/plugins/test_play.py b/test/plugins/test_play.py
index 8577aee70c..4049878b65 100644
--- a/test/plugins/test_play.py
+++ b/test/plugins/test_play.py
@@ -17,128 +17,136 @@
import os
import sys
-
import unittest
-from unittest.mock import patch, ANY
-
from test.helper import TestHelper, control_stdin
+from unittest.mock import ANY, patch
from beets.ui import UserError
from beets.util import open_anything
-@patch('beetsplug.play.util.interactive_open')
+@patch("beetsplug.play.util.interactive_open")
class PlayPluginTest(unittest.TestCase, TestHelper):
def setUp(self):
self.setup_beets()
- self.load_plugins('play')
- self.item = self.add_item(album='a nice älbum', title='aNiceTitle')
+ self.load_plugins("play")
+ self.item = self.add_item(album="a nice älbum", title="aNiceTitle")
self.lib.add_album([self.item])
- self.config['play']['command'] = 'echo'
+ self.config["play"]["command"] = "echo"
def tearDown(self):
self.teardown_beets()
self.unload_plugins()
- def run_and_assert(self, open_mock, args=('title:aNiceTitle',),
- expected_cmd='echo', expected_playlist=None):
- self.run_command('play', *args)
+ def run_and_assert(
+ self,
+ open_mock,
+ args=("title:aNiceTitle",),
+ expected_cmd="echo",
+ expected_playlist=None,
+ ):
+ self.run_command("play", *args)
open_mock.assert_called_once_with(ANY, expected_cmd)
- expected_playlist = expected_playlist or self.item.path.decode('utf-8')
- exp_playlist = expected_playlist + '\n'
- with open(open_mock.call_args[0][0][0], 'rb') as playlist:
- self.assertEqual(exp_playlist, playlist.read().decode('utf-8'))
+ expected_playlist = expected_playlist or self.item.path.decode("utf-8")
+ exp_playlist = expected_playlist + "\n"
+ with open(open_mock.call_args[0][0][0], "rb") as playlist:
+ self.assertEqual(exp_playlist, playlist.read().decode("utf-8"))
def test_basic(self, open_mock):
self.run_and_assert(open_mock)
def test_album_option(self, open_mock):
- self.run_and_assert(open_mock, ['-a', 'nice'])
+ self.run_and_assert(open_mock, ["-a", "nice"])
def test_args_option(self, open_mock):
self.run_and_assert(
- open_mock, ['-A', 'foo', 'title:aNiceTitle'], 'echo foo')
+ open_mock, ["-A", "foo", "title:aNiceTitle"], "echo foo"
+ )
def test_args_option_in_middle(self, open_mock):
- self.config['play']['command'] = 'echo $args other'
+ self.config["play"]["command"] = "echo $args other"
self.run_and_assert(
- open_mock, ['-A', 'foo', 'title:aNiceTitle'], 'echo foo other')
+ open_mock, ["-A", "foo", "title:aNiceTitle"], "echo foo other"
+ )
def test_unset_args_option_in_middle(self, open_mock):
- self.config['play']['command'] = 'echo $args other'
+ self.config["play"]["command"] = "echo $args other"
- self.run_and_assert(
- open_mock, ['title:aNiceTitle'], 'echo other')
+ self.run_and_assert(open_mock, ["title:aNiceTitle"], "echo other")
# FIXME: fails on windows
- @unittest.skipIf(sys.platform == 'win32', 'win32')
+ @unittest.skipIf(sys.platform == "win32", "win32")
def test_relative_to(self, open_mock):
- self.config['play']['command'] = 'echo'
- self.config['play']['relative_to'] = '/something'
+ self.config["play"]["command"] = "echo"
+ self.config["play"]["relative_to"] = "/something"
- path = os.path.relpath(self.item.path, b'/something')
- playlist = path.decode('utf-8')
+ path = os.path.relpath(self.item.path, b"/something")
+ playlist = path.decode("utf-8")
self.run_and_assert(
- open_mock, expected_cmd='echo', expected_playlist=playlist)
+ open_mock, expected_cmd="echo", expected_playlist=playlist
+ )
def test_use_folders(self, open_mock):
- self.config['play']['command'] = None
- self.config['play']['use_folders'] = True
- self.run_command('play', '-a', 'nice')
+ self.config["play"]["command"] = None
+ self.config["play"]["use_folders"] = True
+ self.run_command("play", "-a", "nice")
open_mock.assert_called_once_with(ANY, open_anything())
- with open(open_mock.call_args[0][0][0], 'rb') as f:
- playlist = f.read().decode('utf-8')
- self.assertEqual('{}\n'.format(
- os.path.dirname(self.item.path.decode('utf-8'))),
- playlist)
+ with open(open_mock.call_args[0][0][0], "rb") as f:
+ playlist = f.read().decode("utf-8")
+ self.assertEqual(
+ "{}\n".format(os.path.dirname(self.item.path.decode("utf-8"))),
+ playlist,
+ )
def test_raw(self, open_mock):
- self.config['play']['raw'] = True
+ self.config["play"]["raw"] = True
- self.run_command('play', 'nice')
+ self.run_command("play", "nice")
- open_mock.assert_called_once_with([self.item.path], 'echo')
+ open_mock.assert_called_once_with([self.item.path], "echo")
def test_not_found(self, open_mock):
- self.run_command('play', 'not found')
+ self.run_command("play", "not found")
open_mock.assert_not_called()
def test_warning_threshold(self, open_mock):
- self.config['play']['warning_threshold'] = 1
- self.add_item(title='another NiceTitle')
+ self.config["play"]["warning_threshold"] = 1
+ self.add_item(title="another NiceTitle")
with control_stdin("a"):
- self.run_command('play', 'nice')
+ self.run_command("play", "nice")
open_mock.assert_not_called()
def test_skip_warning_threshold_bypass(self, open_mock):
- self.config['play']['warning_threshold'] = 1
- self.other_item = self.add_item(title='another NiceTitle')
+ self.config["play"]["warning_threshold"] = 1
+ self.other_item = self.add_item(title="another NiceTitle")
- expected_playlist = '{}\n{}'.format(
- self.item.path.decode('utf-8'),
- self.other_item.path.decode('utf-8'))
+ expected_playlist = "{}\n{}".format(
+ self.item.path.decode("utf-8"), self.other_item.path.decode("utf-8")
+ )
with control_stdin("a"):
self.run_and_assert(
open_mock,
- ['-y', 'NiceTitle'],
- expected_playlist=expected_playlist)
+ ["-y", "NiceTitle"],
+ expected_playlist=expected_playlist,
+ )
def test_command_failed(self, open_mock):
open_mock.side_effect = OSError("some reason")
with self.assertRaises(UserError):
- self.run_command('play', 'title:aNiceTitle')
+ self.run_command("play", "title:aNiceTitle")
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/plugins/test_player.py b/test/plugins/test_player.py
index 7a4e85b17b..480b876910 100644
--- a/test/plugins/test_player.py
+++ b/test/plugins/test_player.py
@@ -15,45 +15,59 @@
"""Tests for BPD's implementation of the MPD protocol.
"""
-import unittest
-from test.helper import TestHelper
-
+import importlib.util
+import multiprocessing as mp
import os
+import socket
import sys
-import multiprocessing as mp
+import tempfile
import threading
-import socket
import time
-import yaml
-import tempfile
+import unittest
from contextlib import contextmanager
+from test.helper import TestHelper
+
+# Mock GstPlayer so that the forked process doesn't attempt to import gi:
+from unittest import mock
-from beets.util import py3_path, bluelet
-from beetsplug import bpd
import confuse
+import yaml
+from beets.util import bluelet, py3_path
+from beetsplug import bpd
-# Mock GstPlayer so that the forked process doesn't attempt to import gi:
-from unittest import mock
-import importlib.util
gstplayer = importlib.util.module_from_spec(
importlib.util.find_spec("beetsplug.bpd.gstplayer")
)
+
+
def _gstplayer_play(*_): # noqa: 42
bpd.gstplayer._GstPlayer.playing = True
return mock.DEFAULT
+
+
gstplayer._GstPlayer = mock.MagicMock(
spec_set=[
- "time", "volume", "playing", "run", "play_file", "pause", "stop",
- "seek", "play", "get_decoders",
- ], **{
- 'playing': False,
- 'volume': 0,
- 'time.return_value': (0, 0),
- 'play_file.side_effect': _gstplayer_play,
- 'play.side_effect': _gstplayer_play,
- 'get_decoders.return_value': {'default': ({'audio/mpeg'}, {'mp3'})},
- })
+ "time",
+ "volume",
+ "playing",
+ "run",
+ "play_file",
+ "pause",
+ "stop",
+ "seek",
+ "play",
+ "get_decoders",
+ ],
+ **{
+ "playing": False,
+ "volume": 0,
+ "time.return_value": (0, 0),
+ "play_file.side_effect": _gstplayer_play,
+ "play.side_effect": _gstplayer_play,
+ "get_decoders.return_value": {"default": ({"audio/mpeg"}, {"mp3"})},
+ },
+)
gstplayer.GstPlayer = lambda _: gstplayer._GstPlayer
sys.modules["beetsplug.bpd.gstplayer"] = gstplayer
bpd.gstplayer = gstplayer
@@ -61,34 +75,34 @@ def _gstplayer_play(*_): # noqa: 42
class CommandParseTest(unittest.TestCase):
def test_no_args(self):
- s = r'command'
+ s = r"command"
c = bpd.Command(s)
- self.assertEqual(c.name, 'command')
+ self.assertEqual(c.name, "command")
self.assertEqual(c.args, [])
def test_one_unquoted_arg(self):
- s = r'command hello'
+ s = r"command hello"
c = bpd.Command(s)
- self.assertEqual(c.name, 'command')
- self.assertEqual(c.args, ['hello'])
+ self.assertEqual(c.name, "command")
+ self.assertEqual(c.args, ["hello"])
def test_two_unquoted_args(self):
- s = r'command hello there'
+ s = r"command hello there"
c = bpd.Command(s)
- self.assertEqual(c.name, 'command')
- self.assertEqual(c.args, ['hello', 'there'])
+ self.assertEqual(c.name, "command")
+ self.assertEqual(c.args, ["hello", "there"])
def test_one_quoted_arg(self):
s = r'command "hello there"'
c = bpd.Command(s)
- self.assertEqual(c.name, 'command')
- self.assertEqual(c.args, ['hello there'])
+ self.assertEqual(c.name, "command")
+ self.assertEqual(c.args, ["hello there"])
def test_heterogenous_args(self):
s = r'command "hello there" sir'
c = bpd.Command(s)
- self.assertEqual(c.name, 'command')
- self.assertEqual(c.args, ['hello there', 'sir'])
+ self.assertEqual(c.name, "command")
+ self.assertEqual(c.args, ["hello there", "sir"])
def test_quote_in_arg(self):
s = r'command "hello \" there"'
@@ -98,42 +112,41 @@ def test_quote_in_arg(self):
def test_backslash_in_arg(self):
s = r'command "hello \\ there"'
c = bpd.Command(s)
- self.assertEqual(c.args, ['hello \\ there'])
+ self.assertEqual(c.args, ["hello \\ there"])
class MPCResponse:
def __init__(self, raw_response):
- body = b'\n'.join(raw_response.split(b'\n')[:-2]).decode('utf-8')
+ body = b"\n".join(raw_response.split(b"\n")[:-2]).decode("utf-8")
self.data = self._parse_body(body)
- status = raw_response.split(b'\n')[-2].decode('utf-8')
+ status = raw_response.split(b"\n")[-2].decode("utf-8")
self.ok, self.err_data = self._parse_status(status)
def _parse_status(self, status):
- """ Parses the first response line, which contains the status.
- """
- if status.startswith('OK') or status.startswith('list_OK'):
+ """Parses the first response line, which contains the status."""
+ if status.startswith("OK") or status.startswith("list_OK"):
return True, None
- elif status.startswith('ACK'):
- code, rest = status[5:].split('@', 1)
- pos, rest = rest.split(']', 1)
- cmd, rest = rest[2:].split('}')
+ elif status.startswith("ACK"):
+ code, rest = status[5:].split("@", 1)
+ pos, rest = rest.split("]", 1)
+ cmd, rest = rest[2:].split("}")
return False, (int(code), int(pos), cmd, rest[1:])
else:
- raise RuntimeError(f'Unexpected status: {status!r}')
+ raise RuntimeError(f"Unexpected status: {status!r}")
def _parse_body(self, body):
- """ Messages are generally in the format "header: content".
+ """Messages are generally in the format "header: content".
Convert them into a dict, storing the values for repeated headers as
lists of strings, and non-repeated ones as string.
"""
data = {}
repeated_headers = set()
- for line in body.split('\n'):
+ for line in body.split("\n"):
if not line:
continue
- if ':' not in line:
- raise RuntimeError(f'Unexpected line: {line!r}')
- header, content = line.split(':', 1)
+ if ":" not in line:
+ raise RuntimeError(f"Unexpected line: {line!r}")
+ header, content = line.split(":", 1)
content = content.lstrip()
if header in repeated_headers:
data[header].append(content)
@@ -148,46 +161,46 @@ def _parse_body(self, body):
class MPCClient:
def __init__(self, sock, do_hello=True):
self.sock = sock
- self.buf = b''
+ self.buf = b""
if do_hello:
hello = self.get_response()
if not hello.ok:
- raise RuntimeError('Bad hello')
+ raise RuntimeError("Bad hello")
def get_response(self, force_multi=None):
- """ Wait for a full server response and wrap it in a helper class.
+ """Wait for a full server response and wrap it in a helper class.
If the request was a batch request then this will return a list of
`MPCResponse`s, one for each processed subcommand.
"""
- response = b''
+ response = b""
responses = []
while True:
line = self.readline()
response += line
- if line.startswith(b'OK') or line.startswith(b'ACK'):
+ if line.startswith(b"OK") or line.startswith(b"ACK"):
if force_multi or any(responses):
- if line.startswith(b'ACK'):
+ if line.startswith(b"ACK"):
responses.append(MPCResponse(response))
n_remaining = force_multi - len(responses)
responses.extend([None] * n_remaining)
return responses
else:
return MPCResponse(response)
- if line.startswith(b'list_OK'):
+ if line.startswith(b"list_OK"):
responses.append(MPCResponse(response))
- response = b''
+ response = b""
elif not line:
- raise RuntimeError(f'Unexpected response: {line!r}')
+ raise RuntimeError(f"Unexpected response: {line!r}")
def serialise_command(self, command, *args):
- cmd = [command.encode('utf-8')]
- for arg in [a.encode('utf-8') for a in args]:
- if b' ' in arg:
+ cmd = [command.encode("utf-8")]
+ for arg in [a.encode("utf-8") for a in args]:
+ if b" " in arg:
cmd.append(b'"' + arg + b'"')
else:
cmd.append(arg)
- return b' '.join(cmd) + b'\n'
+ return b" ".join(cmd) + b"\n"
def send_command(self, command, *args):
request = self.serialise_command(command, *args)
@@ -195,7 +208,7 @@ def send_command(self, command, *args):
return self.get_response()
def send_commands(self, *commands):
- """ Use MPD command batching to send multiple commands at once.
+ """Use MPD command batching to send multiple commands at once.
Each item of commands is a tuple containing a command followed by
any arguments.
"""
@@ -205,15 +218,14 @@ def send_commands(self, *commands):
command = command_and_args[0]
args = command_and_args[1:]
requests.append(self.serialise_command(command, *args))
- requests.insert(0, b'command_list_ok_begin\n')
- requests.append(b'command_list_end\n')
- request = b''.join(requests)
+ requests.insert(0, b"command_list_ok_begin\n")
+ requests.append(b"command_list_end\n")
+ request = b"".join(requests)
self.sock.sendall(request)
return self.get_response(force_multi=len(commands))
- def readline(self, terminator=b'\n', bufsize=1024):
- """ Reads a line of data from the socket.
- """
+ def readline(self, terminator=b"\n", bufsize=1024):
+ """Reads a line of data from the socket."""
while True:
if terminator in self.buf:
@@ -226,17 +238,18 @@ def readline(self, terminator=b'\n', bufsize=1024):
self.buf += data
else:
line = self.buf
- self.buf = b''
+ self.buf = b""
return line
def implements(commands, expectedFailure=False): # noqa: N803
def _test(self):
with self.run_bpd() as client:
- response = client.send_command('commands')
+ response = client.send_command("commands")
self._assert_ok(response)
- implemented = response.data['command']
+ implemented = response.data["command"]
self.assertEqual(commands.intersection(implemented), commands)
+
return unittest.expectedFailure(_test) if expectedFailure else _test
@@ -245,11 +258,10 @@ def _test(self):
@mock.patch("beets.util.bluelet.Listener")
def start_server(args, assigned_port, listener_patch):
- """Start the bpd server, writing the port to `assigned_port`.
- """
+ """Start the bpd server, writing the port to `assigned_port`."""
+
def listener_wrap(host, port):
- """Wrap `bluelet.Listener`, writing the port to `assigend_port`.
- """
+        """Wrap `bluelet.Listener`, writing the port to `assigned_port`."""
# `bluelet.Listener` has previously been saved to
# `bluelet_listener` as this function will replace it at its
# original location.
@@ -257,22 +269,30 @@ def listener_wrap(host, port):
# read port assigned by OS
assigned_port.put_nowait(listener.sock.getsockname()[1])
return listener
+
listener_patch.side_effect = listener_wrap
import beets.ui
+
beets.ui.main(args)
class BPDTestHelper(unittest.TestCase, TestHelper):
def setUp(self):
self.setup_beets(disk=True)
- self.load_plugins('bpd')
+ self.load_plugins("bpd")
self.item1 = self.add_item(
- title='Track One Title', track=1,
- album='Album Title', artist='Artist Name')
+ title="Track One Title",
+ track=1,
+ album="Album Title",
+ artist="Artist Name",
+ )
self.item2 = self.add_item(
- title='Track Two Title', track=2,
- album='Album Title', artist='Artist Name')
+ title="Track Two Title",
+ track=2,
+ album="Album Title",
+ artist="Artist Name",
+ )
self.lib.add_album([self.item1, self.item2])
def tearDown(self):
@@ -280,36 +300,51 @@ def tearDown(self):
self.unload_plugins()
@contextmanager
- def run_bpd(self, host='localhost', password=None, do_hello=True,
- second_client=False):
- """ Runs BPD in another process, configured with the same library
+ def run_bpd(
+ self,
+ host="localhost",
+ password=None,
+ do_hello=True,
+ second_client=False,
+ ):
+ """Runs BPD in another process, configured with the same library
database as we created in the setUp method. Exposes a client that is
connected to the server, and kills the server at the end.
"""
# Create a config file:
config = {
- 'pluginpath': [py3_path(self.temp_dir)],
- 'plugins': 'bpd',
- # use port 0 to let the OS choose a free port
- 'bpd': {'host': host, 'port': 0, 'control_port': 0},
+ "pluginpath": [py3_path(self.temp_dir)],
+ "plugins": "bpd",
+ # use port 0 to let the OS choose a free port
+ "bpd": {"host": host, "port": 0, "control_port": 0},
}
if password:
- config['bpd']['password'] = password
+ config["bpd"]["password"] = password
config_file = tempfile.NamedTemporaryFile(
- mode='wb', dir=py3_path(self.temp_dir), suffix='.yaml',
- delete=False)
+ mode="wb", dir=py3_path(self.temp_dir), suffix=".yaml", delete=False
+ )
config_file.write(
- yaml.dump(config, Dumper=confuse.Dumper, encoding='utf-8'))
+ yaml.dump(config, Dumper=confuse.Dumper, encoding="utf-8")
+ )
config_file.close()
# Fork and launch BPD in the new process:
assigned_port = mp.Queue(2) # 2 slots, `control_port` and `port`
- server = mp.Process(target=start_server, args=([
- '--library', self.config['library'].as_filename(),
- '--directory', py3_path(self.libdir),
- '--config', py3_path(config_file.name),
- 'bpd'
- ], assigned_port))
+ server = mp.Process(
+ target=start_server,
+ args=(
+ [
+ "--library",
+ self.config["library"].as_filename(),
+ "--directory",
+ py3_path(self.libdir),
+ "--config",
+ py3_path(config_file.name),
+ "bpd",
+ ],
+ assigned_port,
+ ),
+ )
server.start()
try:
@@ -342,11 +377,12 @@ def run_bpd(self, host='localhost', password=None, do_hello=True,
def _assert_ok(self, *responses):
for response in responses:
self.assertTrue(response is not None)
- self.assertTrue(response.ok, 'Response failed: {}'.format(
- response.err_data))
+ self.assertTrue(
+ response.ok, "Response failed: {}".format(response.err_data)
+ )
def _assert_failed(self, response, code, pos=None):
- """ Check that a command failed with a specific error code. If this
+ """Check that a command failed with a specific error code. If this
is a list of responses, first check all preceding commands were OK.
"""
if pos is not None:
@@ -360,16 +396,18 @@ def _assert_failed(self, response, code, pos=None):
self.assertEqual(code, response.err_data[0])
def _bpd_add(self, client, *items, **kwargs):
- """ Add the given item to the BPD playlist or queue.
- """
- paths = ['/'.join([
- item.artist, item.album,
- py3_path(os.path.basename(item.path))]) for item in items]
- playlist = kwargs.get('playlist')
+ """Add the given item to the BPD playlist or queue."""
+ paths = [
+ "/".join(
+ [item.artist, item.album, py3_path(os.path.basename(item.path))]
+ )
+ for item in items
+ ]
+ playlist = kwargs.get("playlist")
if playlist:
- commands = [('playlistadd', playlist, path) for path in paths]
+ commands = [("playlistadd", playlist, path) for path in paths]
else:
- commands = [('add', path) for path in paths]
+ commands = [("add", path) for path in paths]
responses = client.send_commands(*commands)
self._assert_ok(*responses)
@@ -377,97 +415,116 @@ def _bpd_add(self, client, *items, **kwargs):
class BPDTest(BPDTestHelper):
def test_server_hello(self):
with self.run_bpd(do_hello=False) as client:
- self.assertEqual(client.readline(), b'OK MPD 0.16.0\n')
+ self.assertEqual(client.readline(), b"OK MPD 0.16.0\n")
def test_unknown_cmd(self):
with self.run_bpd() as client:
- response = client.send_command('notacommand')
+ response = client.send_command("notacommand")
self._assert_failed(response, bpd.ERROR_UNKNOWN)
def test_unexpected_argument(self):
with self.run_bpd() as client:
- response = client.send_command('ping', 'extra argument')
+ response = client.send_command("ping", "extra argument")
self._assert_failed(response, bpd.ERROR_ARG)
def test_missing_argument(self):
with self.run_bpd() as client:
- response = client.send_command('add')
+ response = client.send_command("add")
self._assert_failed(response, bpd.ERROR_ARG)
def test_system_error(self):
with self.run_bpd() as client:
- response = client.send_command('crash_TypeError')
+ response = client.send_command("crash_TypeError")
self._assert_failed(response, bpd.ERROR_SYSTEM)
def test_empty_request(self):
with self.run_bpd() as client:
- response = client.send_command('')
+ response = client.send_command("")
self._assert_failed(response, bpd.ERROR_UNKNOWN)
class BPDQueryTest(BPDTestHelper):
- test_implements_query = implements({
- 'clearerror',
- })
+ test_implements_query = implements(
+ {
+ "clearerror",
+ }
+ )
def test_cmd_currentsong(self):
with self.run_bpd() as client:
self._bpd_add(client, self.item1)
responses = client.send_commands(
- ('play',),
- ('currentsong',),
- ('stop',),
- ('currentsong',))
+ ("play",), ("currentsong",), ("stop",), ("currentsong",)
+ )
self._assert_ok(*responses)
- self.assertEqual('1', responses[1].data['Id'])
- self.assertNotIn('Id', responses[3].data)
+ self.assertEqual("1", responses[1].data["Id"])
+ self.assertNotIn("Id", responses[3].data)
def test_cmd_currentsong_tagtypes(self):
with self.run_bpd() as client:
self._bpd_add(client, self.item1)
- responses = client.send_commands(
- ('play',),
- ('currentsong',))
+ responses = client.send_commands(("play",), ("currentsong",))
self._assert_ok(*responses)
self.assertEqual(
- BPDConnectionTest.TAGTYPES.union(BPDQueueTest.METADATA),
- set(responses[1].data.keys()))
+ BPDConnectionTest.TAGTYPES.union(BPDQueueTest.METADATA),
+ set(responses[1].data.keys()),
+ )
def test_cmd_status(self):
with self.run_bpd() as client:
self._bpd_add(client, self.item1, self.item2)
responses = client.send_commands(
- ('status',),
- ('play',),
- ('status',))
+ ("status",), ("play",), ("status",)
+ )
self._assert_ok(*responses)
fields_not_playing = {
- 'repeat', 'random', 'single', 'consume', 'playlist',
- 'playlistlength', 'mixrampdb', 'state',
- 'volume'
+ "repeat",
+ "random",
+ "single",
+ "consume",
+ "playlist",
+ "playlistlength",
+ "mixrampdb",
+ "state",
+ "volume",
}
self.assertEqual(fields_not_playing, set(responses[0].data.keys()))
fields_playing = fields_not_playing | {
- 'song', 'songid', 'time', 'elapsed', 'bitrate', 'duration',
- 'audio', 'nextsong', 'nextsongid'
+ "song",
+ "songid",
+ "time",
+ "elapsed",
+ "bitrate",
+ "duration",
+ "audio",
+ "nextsong",
+ "nextsongid",
}
self.assertEqual(fields_playing, set(responses[2].data.keys()))
def test_cmd_stats(self):
with self.run_bpd() as client:
- response = client.send_command('stats')
+ response = client.send_command("stats")
self._assert_ok(response)
- details = {'artists', 'albums', 'songs', 'uptime', 'db_playtime',
- 'db_update', 'playtime'}
+ details = {
+ "artists",
+ "albums",
+ "songs",
+ "uptime",
+ "db_playtime",
+ "db_update",
+ "playtime",
+ }
self.assertEqual(details, set(response.data.keys()))
def test_cmd_idle(self):
def _toggle(c):
for _ in range(3):
- rs = c.send_commands(('play',), ('pause',))
+ rs = c.send_commands(("play",), ("pause",))
# time.sleep(0.05) # uncomment if test is flaky
if any(not r.ok for r in rs):
- raise RuntimeError('Toggler failed')
+ raise RuntimeError("Toggler failed")
+
with self.run_bpd(second_client=True) as (client, client2):
self._bpd_add(client, self.item1, self.item2)
toggler = threading.Thread(target=_toggle, args=(client2,))
@@ -476,319 +533,348 @@ def _toggle(c):
# Since the client sockets have a 1s timeout set at worst this will
# raise a socket.timeout and fail the test if the toggler thread
# manages to finish before the idle command is sent here.
- response = client.send_command('idle', 'player')
+ response = client.send_command("idle", "player")
toggler.join()
self._assert_ok(response)
def test_cmd_idle_with_pending(self):
with self.run_bpd(second_client=True) as (client, client2):
- response1 = client.send_command('random', '1')
- response2 = client2.send_command('idle')
+ response1 = client.send_command("random", "1")
+ response2 = client2.send_command("idle")
self._assert_ok(response1, response2)
- self.assertEqual('options', response2.data['changed'])
+ self.assertEqual("options", response2.data["changed"])
def test_cmd_noidle(self):
with self.run_bpd() as client:
# Manually send a command without reading a response.
- request = client.serialise_command('idle')
+ request = client.serialise_command("idle")
client.sock.sendall(request)
time.sleep(0.01)
- response = client.send_command('noidle')
+ response = client.send_command("noidle")
self._assert_ok(response)
def test_cmd_noidle_when_not_idle(self):
with self.run_bpd() as client:
# Manually send a command without reading a response.
- request = client.serialise_command('noidle')
+ request = client.serialise_command("noidle")
client.sock.sendall(request)
- response = client.send_command('notacommand')
+ response = client.send_command("notacommand")
self._assert_failed(response, bpd.ERROR_UNKNOWN)
class BPDPlaybackTest(BPDTestHelper):
- test_implements_playback = implements({
- 'random',
- })
+ test_implements_playback = implements(
+ {
+ "random",
+ }
+ )
def test_cmd_consume(self):
with self.run_bpd() as client:
self._bpd_add(client, self.item1, self.item2)
responses = client.send_commands(
- ('consume', '0'),
- ('playlistinfo',),
- ('next',),
- ('playlistinfo',),
- ('consume', '1'),
- ('playlistinfo',),
- ('play', '0'),
- ('next',),
- ('playlistinfo',),
- ('status',))
+ ("consume", "0"),
+ ("playlistinfo",),
+ ("next",),
+ ("playlistinfo",),
+ ("consume", "1"),
+ ("playlistinfo",),
+ ("play", "0"),
+ ("next",),
+ ("playlistinfo",),
+ ("status",),
+ )
self._assert_ok(*responses)
- self.assertEqual(responses[1].data['Id'], responses[3].data['Id'])
- self.assertEqual(['1', '2'], responses[5].data['Id'])
- self.assertEqual('2', responses[8].data['Id'])
- self.assertEqual('1', responses[9].data['consume'])
- self.assertEqual('play', responses[9].data['state'])
+ self.assertEqual(responses[1].data["Id"], responses[3].data["Id"])
+ self.assertEqual(["1", "2"], responses[5].data["Id"])
+ self.assertEqual("2", responses[8].data["Id"])
+ self.assertEqual("1", responses[9].data["consume"])
+ self.assertEqual("play", responses[9].data["state"])
def test_cmd_consume_in_reverse(self):
with self.run_bpd() as client:
self._bpd_add(client, self.item1, self.item2)
responses = client.send_commands(
- ('consume', '1'),
- ('play', '1'),
- ('playlistinfo',),
- ('previous',),
- ('playlistinfo',),
- ('status',))
+ ("consume", "1"),
+ ("play", "1"),
+ ("playlistinfo",),
+ ("previous",),
+ ("playlistinfo",),
+ ("status",),
+ )
self._assert_ok(*responses)
- self.assertEqual(['1', '2'], responses[2].data['Id'])
- self.assertEqual('1', responses[4].data['Id'])
- self.assertEqual('play', responses[5].data['state'])
+ self.assertEqual(["1", "2"], responses[2].data["Id"])
+ self.assertEqual("1", responses[4].data["Id"])
+ self.assertEqual("play", responses[5].data["state"])
def test_cmd_single(self):
with self.run_bpd() as client:
self._bpd_add(client, self.item1, self.item2)
responses = client.send_commands(
- ('status',),
- ('single', '1'),
- ('play',),
- ('status',),
- ('next',),
- ('status',))
+ ("status",),
+ ("single", "1"),
+ ("play",),
+ ("status",),
+ ("next",),
+ ("status",),
+ )
self._assert_ok(*responses)
- self.assertEqual('0', responses[0].data['single'])
- self.assertEqual('1', responses[3].data['single'])
- self.assertEqual('play', responses[3].data['state'])
- self.assertEqual('stop', responses[5].data['state'])
+ self.assertEqual("0", responses[0].data["single"])
+ self.assertEqual("1", responses[3].data["single"])
+ self.assertEqual("play", responses[3].data["state"])
+ self.assertEqual("stop", responses[5].data["state"])
def test_cmd_repeat(self):
with self.run_bpd() as client:
self._bpd_add(client, self.item1, self.item2)
responses = client.send_commands(
- ('repeat', '1'),
- ('play',),
- ('currentsong',),
- ('next',),
- ('currentsong',),
- ('next',),
- ('currentsong',))
+ ("repeat", "1"),
+ ("play",),
+ ("currentsong",),
+ ("next",),
+ ("currentsong",),
+ ("next",),
+ ("currentsong",),
+ )
self._assert_ok(*responses)
- self.assertEqual('1', responses[2].data['Id'])
- self.assertEqual('2', responses[4].data['Id'])
- self.assertEqual('1', responses[6].data['Id'])
+ self.assertEqual("1", responses[2].data["Id"])
+ self.assertEqual("2", responses[4].data["Id"])
+ self.assertEqual("1", responses[6].data["Id"])
def test_cmd_repeat_with_single(self):
with self.run_bpd() as client:
self._bpd_add(client, self.item1, self.item2)
responses = client.send_commands(
- ('repeat', '1'),
- ('single', '1'),
- ('play',),
- ('currentsong',),
- ('next',),
- ('status',),
- ('currentsong',))
+ ("repeat", "1"),
+ ("single", "1"),
+ ("play",),
+ ("currentsong",),
+ ("next",),
+ ("status",),
+ ("currentsong",),
+ )
self._assert_ok(*responses)
- self.assertEqual('1', responses[3].data['Id'])
- self.assertEqual('play', responses[5].data['state'])
- self.assertEqual('1', responses[6].data['Id'])
+ self.assertEqual("1", responses[3].data["Id"])
+ self.assertEqual("play", responses[5].data["state"])
+ self.assertEqual("1", responses[6].data["Id"])
def test_cmd_repeat_in_reverse(self):
with self.run_bpd() as client:
self._bpd_add(client, self.item1, self.item2)
responses = client.send_commands(
- ('repeat', '1'),
- ('play',),
- ('currentsong',),
- ('previous',),
- ('currentsong',))
+ ("repeat", "1"),
+ ("play",),
+ ("currentsong",),
+ ("previous",),
+ ("currentsong",),
+ )
self._assert_ok(*responses)
- self.assertEqual('1', responses[2].data['Id'])
- self.assertEqual('2', responses[4].data['Id'])
+ self.assertEqual("1", responses[2].data["Id"])
+ self.assertEqual("2", responses[4].data["Id"])
def test_cmd_repeat_with_single_in_reverse(self):
with self.run_bpd() as client:
self._bpd_add(client, self.item1, self.item2)
responses = client.send_commands(
- ('repeat', '1'),
- ('single', '1'),
- ('play',),
- ('currentsong',),
- ('previous',),
- ('status',),
- ('currentsong',))
+ ("repeat", "1"),
+ ("single", "1"),
+ ("play",),
+ ("currentsong",),
+ ("previous",),
+ ("status",),
+ ("currentsong",),
+ )
self._assert_ok(*responses)
- self.assertEqual('1', responses[3].data['Id'])
- self.assertEqual('play', responses[5].data['state'])
- self.assertEqual('1', responses[6].data['Id'])
+ self.assertEqual("1", responses[3].data["Id"])
+ self.assertEqual("play", responses[5].data["state"])
+ self.assertEqual("1", responses[6].data["Id"])
def test_cmd_crossfade(self):
with self.run_bpd() as client:
responses = client.send_commands(
- ('status',),
- ('crossfade', '123'),
- ('status',),
- ('crossfade', '-2'))
- response = client.send_command('crossfade', '0.5')
+ ("status",),
+ ("crossfade", "123"),
+ ("status",),
+ ("crossfade", "-2"),
+ )
+ response = client.send_command("crossfade", "0.5")
self._assert_failed(responses, bpd.ERROR_ARG, pos=3)
self._assert_failed(response, bpd.ERROR_ARG)
- self.assertNotIn('xfade', responses[0].data)
- self.assertAlmostEqual(123, int(responses[2].data['xfade']))
+ self.assertNotIn("xfade", responses[0].data)
+ self.assertAlmostEqual(123, int(responses[2].data["xfade"]))
def test_cmd_mixrampdb(self):
with self.run_bpd() as client:
- responses = client.send_commands(
- ('mixrampdb', '-17'),
- ('status',))
+ responses = client.send_commands(("mixrampdb", "-17"), ("status",))
self._assert_ok(*responses)
- self.assertAlmostEqual(-17, float(responses[1].data['mixrampdb']))
+ self.assertAlmostEqual(-17, float(responses[1].data["mixrampdb"]))
def test_cmd_mixrampdelay(self):
with self.run_bpd() as client:
responses = client.send_commands(
- ('mixrampdelay', '2'),
- ('status',),
- ('mixrampdelay', 'nan'),
- ('status',),
- ('mixrampdelay', '-2'))
+ ("mixrampdelay", "2"),
+ ("status",),
+ ("mixrampdelay", "nan"),
+ ("status",),
+ ("mixrampdelay", "-2"),
+ )
self._assert_failed(responses, bpd.ERROR_ARG, pos=4)
- self.assertAlmostEqual(2, float(responses[1].data['mixrampdelay']))
- self.assertNotIn('mixrampdelay', responses[3].data)
+ self.assertAlmostEqual(2, float(responses[1].data["mixrampdelay"]))
+ self.assertNotIn("mixrampdelay", responses[3].data)
def test_cmd_setvol(self):
with self.run_bpd() as client:
responses = client.send_commands(
- ('setvol', '67'),
- ('status',),
- ('setvol', '32'),
- ('status',),
- ('setvol', '101'))
+ ("setvol", "67"),
+ ("status",),
+ ("setvol", "32"),
+ ("status",),
+ ("setvol", "101"),
+ )
self._assert_failed(responses, bpd.ERROR_ARG, pos=4)
- self.assertEqual('67', responses[1].data['volume'])
- self.assertEqual('32', responses[3].data['volume'])
+ self.assertEqual("67", responses[1].data["volume"])
+ self.assertEqual("32", responses[3].data["volume"])
def test_cmd_volume(self):
with self.run_bpd() as client:
responses = client.send_commands(
- ('setvol', '10'),
- ('volume', '5'),
- ('volume', '-2'),
- ('status',))
+ ("setvol", "10"), ("volume", "5"), ("volume", "-2"), ("status",)
+ )
self._assert_ok(*responses)
- self.assertEqual('13', responses[3].data['volume'])
+ self.assertEqual("13", responses[3].data["volume"])
def test_cmd_replay_gain(self):
with self.run_bpd() as client:
responses = client.send_commands(
- ('replay_gain_mode', 'track'),
- ('replay_gain_status',),
- ('replay_gain_mode', 'notanoption'))
+ ("replay_gain_mode", "track"),
+ ("replay_gain_status",),
+ ("replay_gain_mode", "notanoption"),
+ )
self._assert_failed(responses, bpd.ERROR_ARG, pos=2)
- self.assertAlmostEqual('track', responses[1].data['replay_gain_mode'])
+ self.assertAlmostEqual("track", responses[1].data["replay_gain_mode"])
class BPDControlTest(BPDTestHelper):
- test_implements_control = implements({
- 'seek', 'seekid', 'seekcur',
- }, expectedFailure=True)
+ test_implements_control = implements(
+ {
+ "seek",
+ "seekid",
+ "seekcur",
+ },
+ expectedFailure=True,
+ )
def test_cmd_play(self):
with self.run_bpd() as client:
self._bpd_add(client, self.item1, self.item2)
responses = client.send_commands(
- ('status',),
- ('play',),
- ('status',),
- ('play', '1'),
- ('currentsong',))
+ ("status",),
+ ("play",),
+ ("status",),
+ ("play", "1"),
+ ("currentsong",),
+ )
self._assert_ok(*responses)
- self.assertEqual('stop', responses[0].data['state'])
- self.assertEqual('play', responses[2].data['state'])
- self.assertEqual('2', responses[4].data['Id'])
+ self.assertEqual("stop", responses[0].data["state"])
+ self.assertEqual("play", responses[2].data["state"])
+ self.assertEqual("2", responses[4].data["Id"])
def test_cmd_playid(self):
with self.run_bpd() as client:
self._bpd_add(client, self.item1, self.item2)
responses = client.send_commands(
- ('playid', '2'),
- ('currentsong',),
- ('clear',))
+ ("playid", "2"), ("currentsong",), ("clear",)
+ )
self._bpd_add(client, self.item2, self.item1)
- responses.extend(client.send_commands(
- ('playid', '2'),
- ('currentsong',)))
+ responses.extend(
+ client.send_commands(("playid", "2"), ("currentsong",))
+ )
self._assert_ok(*responses)
- self.assertEqual('2', responses[1].data['Id'])
- self.assertEqual('2', responses[4].data['Id'])
+ self.assertEqual("2", responses[1].data["Id"])
+ self.assertEqual("2", responses[4].data["Id"])
def test_cmd_pause(self):
with self.run_bpd() as client:
self._bpd_add(client, self.item1)
responses = client.send_commands(
- ('play',),
- ('pause',),
- ('status',),
- ('currentsong',))
+ ("play",), ("pause",), ("status",), ("currentsong",)
+ )
self._assert_ok(*responses)
- self.assertEqual('pause', responses[2].data['state'])
- self.assertEqual('1', responses[3].data['Id'])
+ self.assertEqual("pause", responses[2].data["state"])
+ self.assertEqual("1", responses[3].data["Id"])
def test_cmd_stop(self):
with self.run_bpd() as client:
self._bpd_add(client, self.item1)
responses = client.send_commands(
- ('play',),
- ('stop',),
- ('status',),
- ('currentsong',))
+ ("play",), ("stop",), ("status",), ("currentsong",)
+ )
self._assert_ok(*responses)
- self.assertEqual('stop', responses[2].data['state'])
- self.assertNotIn('Id', responses[3].data)
+ self.assertEqual("stop", responses[2].data["state"])
+ self.assertNotIn("Id", responses[3].data)
def test_cmd_next(self):
with self.run_bpd() as client:
self._bpd_add(client, self.item1, self.item2)
responses = client.send_commands(
- ('play',),
- ('currentsong',),
- ('next',),
- ('currentsong',),
- ('next',),
- ('status',))
+ ("play",),
+ ("currentsong",),
+ ("next",),
+ ("currentsong",),
+ ("next",),
+ ("status",),
+ )
self._assert_ok(*responses)
- self.assertEqual('1', responses[1].data['Id'])
- self.assertEqual('2', responses[3].data['Id'])
- self.assertEqual('stop', responses[5].data['state'])
+ self.assertEqual("1", responses[1].data["Id"])
+ self.assertEqual("2", responses[3].data["Id"])
+ self.assertEqual("stop", responses[5].data["state"])
def test_cmd_previous(self):
with self.run_bpd() as client:
self._bpd_add(client, self.item1, self.item2)
responses = client.send_commands(
- ('play', '1'),
- ('currentsong',),
- ('previous',),
- ('currentsong',),
- ('previous',),
- ('status',),
- ('currentsong',))
+ ("play", "1"),
+ ("currentsong",),
+ ("previous",),
+ ("currentsong",),
+ ("previous",),
+ ("status",),
+ ("currentsong",),
+ )
self._assert_ok(*responses)
- self.assertEqual('2', responses[1].data['Id'])
- self.assertEqual('1', responses[3].data['Id'])
- self.assertEqual('play', responses[5].data['state'])
- self.assertEqual('1', responses[6].data['Id'])
+ self.assertEqual("2", responses[1].data["Id"])
+ self.assertEqual("1", responses[3].data["Id"])
+ self.assertEqual("play", responses[5].data["state"])
+ self.assertEqual("1", responses[6].data["Id"])
class BPDQueueTest(BPDTestHelper):
- test_implements_queue = implements({
- 'addid', 'clear', 'delete', 'deleteid', 'move',
- 'moveid', 'playlist', 'playlistfind',
- 'playlistsearch', 'plchanges',
- 'plchangesposid', 'prio', 'prioid', 'rangeid', 'shuffle',
- 'swap', 'swapid', 'addtagid', 'cleartagid',
- }, expectedFailure=True)
-
- METADATA = {'Pos', 'Time', 'Id', 'file', 'duration'}
+ test_implements_queue = implements(
+ {
+ "addid",
+ "clear",
+ "delete",
+ "deleteid",
+ "move",
+ "moveid",
+ "playlist",
+ "playlistfind",
+ "playlistsearch",
+ "plchanges",
+ "plchangesposid",
+ "prio",
+ "prioid",
+ "rangeid",
+ "shuffle",
+ "swap",
+ "swapid",
+ "addtagid",
+ "cleartagid",
+ },
+ expectedFailure=True,
+ )
+
+ METADATA = {"Pos", "Time", "Id", "file", "duration"}
def test_cmd_add(self):
with self.run_bpd() as client:
@@ -798,248 +884,315 @@ def test_cmd_playlistinfo(self):
with self.run_bpd() as client:
self._bpd_add(client, self.item1, self.item2)
responses = client.send_commands(
- ('playlistinfo',),
- ('playlistinfo', '0'),
- ('playlistinfo', '0:2'),
- ('playlistinfo', '200'))
+ ("playlistinfo",),
+ ("playlistinfo", "0"),
+ ("playlistinfo", "0:2"),
+ ("playlistinfo", "200"),
+ )
self._assert_failed(responses, bpd.ERROR_ARG, pos=3)
- self.assertEqual('1', responses[1].data['Id'])
- self.assertEqual(['1', '2'], responses[2].data['Id'])
+ self.assertEqual("1", responses[1].data["Id"])
+ self.assertEqual(["1", "2"], responses[2].data["Id"])
def test_cmd_playlistinfo_tagtypes(self):
with self.run_bpd() as client:
self._bpd_add(client, self.item1)
- response = client.send_command('playlistinfo', '0')
+ response = client.send_command("playlistinfo", "0")
self._assert_ok(response)
self.assertEqual(
- BPDConnectionTest.TAGTYPES.union(BPDQueueTest.METADATA),
- set(response.data.keys()))
+ BPDConnectionTest.TAGTYPES.union(BPDQueueTest.METADATA),
+ set(response.data.keys()),
+ )
def test_cmd_playlistid(self):
with self.run_bpd() as client:
self._bpd_add(client, self.item1, self.item2)
responses = client.send_commands(
- ('playlistid', '2'),
- ('playlistid',))
+ ("playlistid", "2"), ("playlistid",)
+ )
self._assert_ok(*responses)
- self.assertEqual('Track Two Title', responses[0].data['Title'])
- self.assertEqual(['1', '2'], responses[1].data['Track'])
+ self.assertEqual("Track Two Title", responses[0].data["Title"])
+ self.assertEqual(["1", "2"], responses[1].data["Track"])
class BPDPlaylistsTest(BPDTestHelper):
- test_implements_playlists = implements({'playlistadd'})
+ test_implements_playlists = implements({"playlistadd"})
def test_cmd_listplaylist(self):
with self.run_bpd() as client:
- response = client.send_command('listplaylist', 'anything')
+ response = client.send_command("listplaylist", "anything")
self._assert_failed(response, bpd.ERROR_NO_EXIST)
def test_cmd_listplaylistinfo(self):
with self.run_bpd() as client:
- response = client.send_command('listplaylistinfo', 'anything')
+ response = client.send_command("listplaylistinfo", "anything")
self._assert_failed(response, bpd.ERROR_NO_EXIST)
def test_cmd_listplaylists(self):
with self.run_bpd() as client:
- response = client.send_command('listplaylists')
+ response = client.send_command("listplaylists")
self._assert_failed(response, bpd.ERROR_UNKNOWN)
def test_cmd_load(self):
with self.run_bpd() as client:
- response = client.send_command('load', 'anything')
+ response = client.send_command("load", "anything")
self._assert_failed(response, bpd.ERROR_NO_EXIST)
@unittest.skip
def test_cmd_playlistadd(self):
with self.run_bpd() as client:
- self._bpd_add(client, self.item1, playlist='anything')
+ self._bpd_add(client, self.item1, playlist="anything")
def test_cmd_playlistclear(self):
with self.run_bpd() as client:
- response = client.send_command('playlistclear', 'anything')
+ response = client.send_command("playlistclear", "anything")
self._assert_failed(response, bpd.ERROR_UNKNOWN)
def test_cmd_playlistdelete(self):
with self.run_bpd() as client:
- response = client.send_command('playlistdelete', 'anything', '0')
+ response = client.send_command("playlistdelete", "anything", "0")
self._assert_failed(response, bpd.ERROR_UNKNOWN)
def test_cmd_playlistmove(self):
with self.run_bpd() as client:
- response = client.send_command(
- 'playlistmove', 'anything', '0', '1')
+ response = client.send_command("playlistmove", "anything", "0", "1")
self._assert_failed(response, bpd.ERROR_UNKNOWN)
def test_cmd_rename(self):
with self.run_bpd() as client:
- response = client.send_command('rename', 'anything', 'newname')
+ response = client.send_command("rename", "anything", "newname")
self._assert_failed(response, bpd.ERROR_UNKNOWN)
def test_cmd_rm(self):
with self.run_bpd() as client:
- response = client.send_command('rm', 'anything')
+ response = client.send_command("rm", "anything")
self._assert_failed(response, bpd.ERROR_UNKNOWN)
def test_cmd_save(self):
with self.run_bpd() as client:
self._bpd_add(client, self.item1)
- response = client.send_command('save', 'newplaylist')
+ response = client.send_command("save", "newplaylist")
self._assert_failed(response, bpd.ERROR_UNKNOWN)
class BPDDatabaseTest(BPDTestHelper):
- test_implements_database = implements({
- 'albumart', 'find', 'findadd', 'listall',
- 'listallinfo', 'listfiles', 'readcomments',
- 'searchadd', 'searchaddpl', 'update', 'rescan',
- }, expectedFailure=True)
+ test_implements_database = implements(
+ {
+ "albumart",
+ "find",
+ "findadd",
+ "listall",
+ "listallinfo",
+ "listfiles",
+ "readcomments",
+ "searchadd",
+ "searchaddpl",
+ "update",
+ "rescan",
+ },
+ expectedFailure=True,
+ )
def test_cmd_search(self):
with self.run_bpd() as client:
- response = client.send_command('search', 'track', '1')
+ response = client.send_command("search", "track", "1")
self._assert_ok(response)
- self.assertEqual(self.item1.title, response.data['Title'])
+ self.assertEqual(self.item1.title, response.data["Title"])
def test_cmd_list(self):
with self.run_bpd() as client:
responses = client.send_commands(
- ('list', 'album'),
- ('list', 'track'),
- ('list', 'album', 'artist', 'Artist Name', 'track'))
+ ("list", "album"),
+ ("list", "track"),
+ ("list", "album", "artist", "Artist Name", "track"),
+ )
self._assert_failed(responses, bpd.ERROR_ARG, pos=2)
- self.assertEqual('Album Title', responses[0].data['Album'])
- self.assertEqual(['1', '2'], responses[1].data['Track'])
+ self.assertEqual("Album Title", responses[0].data["Album"])
+ self.assertEqual(["1", "2"], responses[1].data["Track"])
def test_cmd_list_three_arg_form(self):
with self.run_bpd() as client:
responses = client.send_commands(
- ('list', 'album', 'artist', 'Artist Name'),
- ('list', 'album', 'Artist Name'),
- ('list', 'track', 'Artist Name'))
+ ("list", "album", "artist", "Artist Name"),
+ ("list", "album", "Artist Name"),
+ ("list", "track", "Artist Name"),
+ )
self._assert_failed(responses, bpd.ERROR_ARG, pos=2)
self.assertEqual(responses[0].data, responses[1].data)
def test_cmd_lsinfo(self):
with self.run_bpd() as client:
- response1 = client.send_command('lsinfo')
+ response1 = client.send_command("lsinfo")
self._assert_ok(response1)
response2 = client.send_command(
- 'lsinfo', response1.data['directory'])
+ "lsinfo", response1.data["directory"]
+ )
self._assert_ok(response2)
response3 = client.send_command(
- 'lsinfo', response2.data['directory'])
+ "lsinfo", response2.data["directory"]
+ )
self._assert_ok(response3)
- self.assertIn(self.item1.title, response3.data['Title'])
+ self.assertIn(self.item1.title, response3.data["Title"])
def test_cmd_count(self):
with self.run_bpd() as client:
- response = client.send_command('count', 'track', '1')
+ response = client.send_command("count", "track", "1")
self._assert_ok(response)
- self.assertEqual('1', response.data['songs'])
- self.assertEqual('0', response.data['playtime'])
+ self.assertEqual("1", response.data["songs"])
+ self.assertEqual("0", response.data["playtime"])
class BPDMountsTest(BPDTestHelper):
- test_implements_mounts = implements({
- 'mount', 'unmount', 'listmounts', 'listneighbors',
- }, expectedFailure=True)
+ test_implements_mounts = implements(
+ {
+ "mount",
+ "unmount",
+ "listmounts",
+ "listneighbors",
+ },
+ expectedFailure=True,
+ )
class BPDStickerTest(BPDTestHelper):
- test_implements_stickers = implements({
- 'sticker',
- }, expectedFailure=True)
+ test_implements_stickers = implements(
+ {
+ "sticker",
+ },
+ expectedFailure=True,
+ )
class BPDConnectionTest(BPDTestHelper):
- test_implements_connection = implements({
- 'close', 'kill',
- })
+ test_implements_connection = implements(
+ {
+ "close",
+ "kill",
+ }
+ )
ALL_MPD_TAGTYPES = {
- 'Artist', 'ArtistSort', 'Album', 'AlbumSort', 'AlbumArtist',
- 'AlbumArtistSort', 'Title', 'Track', 'Name', 'Genre', 'Date',
- 'Composer', 'Performer', 'Comment', 'Disc', 'Label',
- 'OriginalDate', 'MUSICBRAINZ_ARTISTID', 'MUSICBRAINZ_ALBUMID',
- 'MUSICBRAINZ_ALBUMARTISTID', 'MUSICBRAINZ_TRACKID',
- 'MUSICBRAINZ_RELEASETRACKID', 'MUSICBRAINZ_WORKID',
+ "Artist",
+ "ArtistSort",
+ "Album",
+ "AlbumSort",
+ "AlbumArtist",
+ "AlbumArtistSort",
+ "Title",
+ "Track",
+ "Name",
+ "Genre",
+ "Date",
+ "Composer",
+ "Performer",
+ "Comment",
+ "Disc",
+ "Label",
+ "OriginalDate",
+ "MUSICBRAINZ_ARTISTID",
+ "MUSICBRAINZ_ALBUMID",
+ "MUSICBRAINZ_ALBUMARTISTID",
+ "MUSICBRAINZ_TRACKID",
+ "MUSICBRAINZ_RELEASETRACKID",
+ "MUSICBRAINZ_WORKID",
}
UNSUPPORTED_TAGTYPES = {
- 'MUSICBRAINZ_WORKID', # not tracked by beets
- 'Performer', # not tracked by beets
- 'AlbumSort', # not tracked by beets
- 'Name', # junk field for internet radio
+ "MUSICBRAINZ_WORKID", # not tracked by beets
+ "Performer", # not tracked by beets
+ "AlbumSort", # not tracked by beets
+ "Name", # junk field for internet radio
}
TAGTYPES = ALL_MPD_TAGTYPES.difference(UNSUPPORTED_TAGTYPES)
def test_cmd_password(self):
- with self.run_bpd(password='abc123') as client:
- response = client.send_command('status')
+ with self.run_bpd(password="abc123") as client:
+ response = client.send_command("status")
self._assert_failed(response, bpd.ERROR_PERMISSION)
- response = client.send_command('password', 'wrong')
+ response = client.send_command("password", "wrong")
self._assert_failed(response, bpd.ERROR_PASSWORD)
responses = client.send_commands(
- ('password', 'abc123'),
- ('status',))
+ ("password", "abc123"), ("status",)
+ )
self._assert_ok(*responses)
def test_cmd_ping(self):
with self.run_bpd() as client:
- response = client.send_command('ping')
+ response = client.send_command("ping")
self._assert_ok(response)
def test_cmd_tagtypes(self):
with self.run_bpd() as client:
- response = client.send_command('tagtypes')
+ response = client.send_command("tagtypes")
self._assert_ok(response)
- self.assertEqual(
- self.TAGTYPES,
- set(response.data['tagtype']))
+ self.assertEqual(self.TAGTYPES, set(response.data["tagtype"]))
@unittest.skip
def test_tagtypes_mask(self):
with self.run_bpd() as client:
- response = client.send_command('tagtypes', 'clear')
+ response = client.send_command("tagtypes", "clear")
self._assert_ok(response)
class BPDPartitionTest(BPDTestHelper):
- test_implements_partitions = implements({
- 'partition', 'listpartitions', 'newpartition',
- }, expectedFailure=True)
+ test_implements_partitions = implements(
+ {
+ "partition",
+ "listpartitions",
+ "newpartition",
+ },
+ expectedFailure=True,
+ )
class BPDDeviceTest(BPDTestHelper):
- test_implements_devices = implements({
- 'disableoutput', 'enableoutput', 'toggleoutput', 'outputs',
- }, expectedFailure=True)
+ test_implements_devices = implements(
+ {
+ "disableoutput",
+ "enableoutput",
+ "toggleoutput",
+ "outputs",
+ },
+ expectedFailure=True,
+ )
class BPDReflectionTest(BPDTestHelper):
- test_implements_reflection = implements({
- 'config', 'commands', 'notcommands', 'urlhandlers',
- }, expectedFailure=True)
+ test_implements_reflection = implements(
+ {
+ "config",
+ "commands",
+ "notcommands",
+ "urlhandlers",
+ },
+ expectedFailure=True,
+ )
def test_cmd_decoders(self):
with self.run_bpd() as client:
- response = client.send_command('decoders')
+ response = client.send_command("decoders")
self._assert_ok(response)
- self.assertEqual('default', response.data['plugin'])
- self.assertEqual('mp3', response.data['suffix'])
- self.assertEqual('audio/mpeg', response.data['mime_type'])
+ self.assertEqual("default", response.data["plugin"])
+ self.assertEqual("mp3", response.data["suffix"])
+ self.assertEqual("audio/mpeg", response.data["mime_type"])
class BPDPeersTest(BPDTestHelper):
- test_implements_peers = implements({
- 'subscribe', 'unsubscribe', 'channels', 'readmessages',
- 'sendmessage',
- }, expectedFailure=True)
+ test_implements_peers = implements(
+ {
+ "subscribe",
+ "unsubscribe",
+ "channels",
+ "readmessages",
+ "sendmessage",
+ },
+ expectedFailure=True,
+ )
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/plugins/test_playlist.py b/test/plugins/test_playlist.py
index 74783fde8d..21bf6491fb 100644
--- a/test/plugins/test_playlist.py
+++ b/test/plugins/test_playlist.py
@@ -17,10 +17,8 @@
import shutil
import tempfile
import unittest
-
from shlex import quote
-from test import _common
-from test import helper
+from test import _common, helper
import beets
@@ -28,46 +26,58 @@
class PlaylistTestHelper(helper.TestHelper):
def setUp(self):
self.setup_beets()
- self.lib = beets.library.Library(':memory:')
+ self.lib = beets.library.Library(":memory:")
- self.music_dir = os.path.expanduser(os.path.join('~', 'Music'))
+ self.music_dir = os.path.expanduser(os.path.join("~", "Music"))
i1 = _common.item()
- i1.path = beets.util.normpath(os.path.join(
- self.music_dir,
- 'a', 'b', 'c.mp3',
- ))
- i1.title = 'some item'
- i1.album = 'some album'
+ i1.path = beets.util.normpath(
+ os.path.join(
+ self.music_dir,
+ "a",
+ "b",
+ "c.mp3",
+ )
+ )
+ i1.title = "some item"
+ i1.album = "some album"
self.lib.add(i1)
self.lib.add_album([i1])
i2 = _common.item()
- i2.path = beets.util.normpath(os.path.join(
- self.music_dir,
- 'd', 'e', 'f.mp3',
- ))
- i2.title = 'another item'
- i2.album = 'another album'
+ i2.path = beets.util.normpath(
+ os.path.join(
+ self.music_dir,
+ "d",
+ "e",
+ "f.mp3",
+ )
+ )
+ i2.title = "another item"
+ i2.album = "another album"
self.lib.add(i2)
self.lib.add_album([i2])
i3 = _common.item()
- i3.path = beets.util.normpath(os.path.join(
- self.music_dir,
- 'x', 'y', 'z.mp3',
- ))
- i3.title = 'yet another item'
- i3.album = 'yet another album'
+ i3.path = beets.util.normpath(
+ os.path.join(
+ self.music_dir,
+ "x",
+ "y",
+ "z.mp3",
+ )
+ )
+ i3.title = "yet another item"
+ i3.album = "yet another album"
self.lib.add(i3)
self.lib.add_album([i3])
self.playlist_dir = tempfile.mkdtemp()
- self.config['directory'] = self.music_dir
- self.config['playlist']['playlist_dir'] = self.playlist_dir
+ self.config["directory"] = self.music_dir
+ self.config["playlist"]["playlist_dir"] = self.playlist_dir
self.setup_test()
- self.load_plugins('playlist')
+ self.load_plugins("playlist")
def setup_test(self):
raise NotImplementedError
@@ -80,228 +90,308 @@ def tearDown(self):
class PlaylistQueryTestHelper(PlaylistTestHelper):
def test_name_query_with_absolute_paths_in_playlist(self):
- q = 'playlist:absolute'
+ q = "playlist:absolute"
results = self.lib.items(q)
- self.assertEqual({i.title for i in results}, {
- 'some item',
- 'another item',
- })
+ self.assertEqual(
+ {i.title for i in results},
+ {
+ "some item",
+ "another item",
+ },
+ )
def test_path_query_with_absolute_paths_in_playlist(self):
- q = 'playlist:{}'.format(quote(os.path.join(
- self.playlist_dir,
- 'absolute.m3u',
- )))
+ q = "playlist:{}".format(
+ quote(
+ os.path.join(
+ self.playlist_dir,
+ "absolute.m3u",
+ )
+ )
+ )
results = self.lib.items(q)
- self.assertEqual({i.title for i in results}, {
- 'some item',
- 'another item',
- })
+ self.assertEqual(
+ {i.title for i in results},
+ {
+ "some item",
+ "another item",
+ },
+ )
def test_name_query_with_relative_paths_in_playlist(self):
- q = 'playlist:relative'
+ q = "playlist:relative"
results = self.lib.items(q)
- self.assertEqual({i.title for i in results}, {
- 'some item',
- 'another item',
- })
+ self.assertEqual(
+ {i.title for i in results},
+ {
+ "some item",
+ "another item",
+ },
+ )
def test_path_query_with_relative_paths_in_playlist(self):
- q = 'playlist:{}'.format(quote(os.path.join(
- self.playlist_dir,
- 'relative.m3u',
- )))
+ q = "playlist:{}".format(
+ quote(
+ os.path.join(
+ self.playlist_dir,
+ "relative.m3u",
+ )
+ )
+ )
results = self.lib.items(q)
- self.assertEqual({i.title for i in results}, {
- 'some item',
- 'another item',
- })
+ self.assertEqual(
+ {i.title for i in results},
+ {
+ "some item",
+ "another item",
+ },
+ )
def test_name_query_with_nonexisting_playlist(self):
- q = 'playlist:nonexisting'
+ q = "playlist:nonexisting"
results = self.lib.items(q)
self.assertEqual(set(results), set())
def test_path_query_with_nonexisting_playlist(self):
- q = 'playlist:{}'.format(quote(os.path.join(
- self.playlist_dir,
- self.playlist_dir,
- 'nonexisting.m3u',
- )))
+ q = "playlist:{}".format(
+ quote(
+ os.path.join(
+ self.playlist_dir,
+ self.playlist_dir,
+ "nonexisting.m3u",
+ )
+ )
+ )
results = self.lib.items(q)
self.assertEqual(set(results), set())
class PlaylistTestRelativeToLib(PlaylistQueryTestHelper, unittest.TestCase):
def setup_test(self):
- with open(os.path.join(self.playlist_dir, 'absolute.m3u'), 'w') as f:
- f.write('{}\n'.format(os.path.join(
- self.music_dir, 'a', 'b', 'c.mp3')))
- f.write('{}\n'.format(os.path.join(
- self.music_dir, 'd', 'e', 'f.mp3')))
- f.write('{}\n'.format(os.path.join(
- self.music_dir, 'nonexisting.mp3')))
+ with open(os.path.join(self.playlist_dir, "absolute.m3u"), "w") as f:
+ f.write(
+ "{}\n".format(os.path.join(self.music_dir, "a", "b", "c.mp3"))
+ )
+ f.write(
+ "{}\n".format(os.path.join(self.music_dir, "d", "e", "f.mp3"))
+ )
+ f.write(
+ "{}\n".format(os.path.join(self.music_dir, "nonexisting.mp3"))
+ )
- with open(os.path.join(self.playlist_dir, 'relative.m3u'), 'w') as f:
- f.write('{}\n'.format(os.path.join('a', 'b', 'c.mp3')))
- f.write('{}\n'.format(os.path.join('d', 'e', 'f.mp3')))
- f.write('{}\n'.format('nonexisting.mp3'))
+ with open(os.path.join(self.playlist_dir, "relative.m3u"), "w") as f:
+ f.write("{}\n".format(os.path.join("a", "b", "c.mp3")))
+ f.write("{}\n".format(os.path.join("d", "e", "f.mp3")))
+ f.write("{}\n".format("nonexisting.mp3"))
- self.config['playlist']['relative_to'] = 'library'
+ self.config["playlist"]["relative_to"] = "library"
class PlaylistTestRelativeToDir(PlaylistQueryTestHelper, unittest.TestCase):
def setup_test(self):
- with open(os.path.join(self.playlist_dir, 'absolute.m3u'), 'w') as f:
- f.write('{}\n'.format(os.path.join(
- self.music_dir, 'a', 'b', 'c.mp3')))
- f.write('{}\n'.format(os.path.join(
- self.music_dir, 'd', 'e', 'f.mp3')))
- f.write('{}\n'.format(os.path.join(
- self.music_dir, 'nonexisting.mp3')))
+ with open(os.path.join(self.playlist_dir, "absolute.m3u"), "w") as f:
+ f.write(
+ "{}\n".format(os.path.join(self.music_dir, "a", "b", "c.mp3"))
+ )
+ f.write(
+ "{}\n".format(os.path.join(self.music_dir, "d", "e", "f.mp3"))
+ )
+ f.write(
+ "{}\n".format(os.path.join(self.music_dir, "nonexisting.mp3"))
+ )
- with open(os.path.join(self.playlist_dir, 'relative.m3u'), 'w') as f:
- f.write('{}\n'.format(os.path.join('a', 'b', 'c.mp3')))
- f.write('{}\n'.format(os.path.join('d', 'e', 'f.mp3')))
- f.write('{}\n'.format('nonexisting.mp3'))
+ with open(os.path.join(self.playlist_dir, "relative.m3u"), "w") as f:
+ f.write("{}\n".format(os.path.join("a", "b", "c.mp3")))
+ f.write("{}\n".format(os.path.join("d", "e", "f.mp3")))
+ f.write("{}\n".format("nonexisting.mp3"))
- self.config['playlist']['relative_to'] = self.music_dir
+ self.config["playlist"]["relative_to"] = self.music_dir
class PlaylistTestRelativeToPls(PlaylistQueryTestHelper, unittest.TestCase):
def setup_test(self):
- with open(os.path.join(self.playlist_dir, 'absolute.m3u'), 'w') as f:
- f.write('{}\n'.format(os.path.join(
- self.music_dir, 'a', 'b', 'c.mp3')))
- f.write('{}\n'.format(os.path.join(
- self.music_dir, 'd', 'e', 'f.mp3')))
- f.write('{}\n'.format(os.path.join(
- self.music_dir, 'nonexisting.mp3')))
-
- with open(os.path.join(self.playlist_dir, 'relative.m3u'), 'w') as f:
- f.write('{}\n'.format(os.path.relpath(
- os.path.join(self.music_dir, 'a', 'b', 'c.mp3'),
- start=self.playlist_dir,
- )))
- f.write('{}\n'.format(os.path.relpath(
- os.path.join(self.music_dir, 'd', 'e', 'f.mp3'),
- start=self.playlist_dir,
- )))
- f.write('{}\n'.format(os.path.relpath(
- os.path.join(self.music_dir, 'nonexisting.mp3'),
- start=self.playlist_dir,
- )))
-
- self.config['playlist']['relative_to'] = 'playlist'
- self.config['playlist']['playlist_dir'] = self.playlist_dir
+ with open(os.path.join(self.playlist_dir, "absolute.m3u"), "w") as f:
+ f.write(
+ "{}\n".format(os.path.join(self.music_dir, "a", "b", "c.mp3"))
+ )
+ f.write(
+ "{}\n".format(os.path.join(self.music_dir, "d", "e", "f.mp3"))
+ )
+ f.write(
+ "{}\n".format(os.path.join(self.music_dir, "nonexisting.mp3"))
+ )
+
+ with open(os.path.join(self.playlist_dir, "relative.m3u"), "w") as f:
+ f.write(
+ "{}\n".format(
+ os.path.relpath(
+ os.path.join(self.music_dir, "a", "b", "c.mp3"),
+ start=self.playlist_dir,
+ )
+ )
+ )
+ f.write(
+ "{}\n".format(
+ os.path.relpath(
+ os.path.join(self.music_dir, "d", "e", "f.mp3"),
+ start=self.playlist_dir,
+ )
+ )
+ )
+ f.write(
+ "{}\n".format(
+ os.path.relpath(
+ os.path.join(self.music_dir, "nonexisting.mp3"),
+ start=self.playlist_dir,
+ )
+ )
+ )
+
+ self.config["playlist"]["relative_to"] = "playlist"
+ self.config["playlist"]["playlist_dir"] = self.playlist_dir
class PlaylistUpdateTestHelper(PlaylistTestHelper):
def setup_test(self):
- with open(os.path.join(self.playlist_dir, 'absolute.m3u'), 'w') as f:
- f.write('{}\n'.format(os.path.join(
- self.music_dir, 'a', 'b', 'c.mp3')))
- f.write('{}\n'.format(os.path.join(
- self.music_dir, 'd', 'e', 'f.mp3')))
- f.write('{}\n'.format(os.path.join(
- self.music_dir, 'nonexisting.mp3')))
-
- with open(os.path.join(self.playlist_dir, 'relative.m3u'), 'w') as f:
- f.write('{}\n'.format(os.path.join('a', 'b', 'c.mp3')))
- f.write('{}\n'.format(os.path.join('d', 'e', 'f.mp3')))
- f.write('{}\n'.format('nonexisting.mp3'))
-
- self.config['playlist']['auto'] = True
- self.config['playlist']['relative_to'] = 'library'
+ with open(os.path.join(self.playlist_dir, "absolute.m3u"), "w") as f:
+ f.write(
+ "{}\n".format(os.path.join(self.music_dir, "a", "b", "c.mp3"))
+ )
+ f.write(
+ "{}\n".format(os.path.join(self.music_dir, "d", "e", "f.mp3"))
+ )
+ f.write(
+ "{}\n".format(os.path.join(self.music_dir, "nonexisting.mp3"))
+ )
+
+ with open(os.path.join(self.playlist_dir, "relative.m3u"), "w") as f:
+ f.write("{}\n".format(os.path.join("a", "b", "c.mp3")))
+ f.write("{}\n".format(os.path.join("d", "e", "f.mp3")))
+ f.write("{}\n".format("nonexisting.mp3"))
+
+ self.config["playlist"]["auto"] = True
+ self.config["playlist"]["relative_to"] = "library"
class PlaylistTestItemMoved(PlaylistUpdateTestHelper, unittest.TestCase):
def test_item_moved(self):
# Emit item_moved event for an item that is in a playlist
- results = self.lib.items('path:{}'.format(quote(
- os.path.join(self.music_dir, 'd', 'e', 'f.mp3'))))
+ results = self.lib.items(
+ "path:{}".format(
+ quote(os.path.join(self.music_dir, "d", "e", "f.mp3"))
+ )
+ )
item = results[0]
beets.plugins.send(
- 'item_moved', item=item, source=item.path,
+ "item_moved",
+ item=item,
+ source=item.path,
destination=beets.util.bytestring_path(
- os.path.join(self.music_dir, 'g', 'h', 'i.mp3')))
+ os.path.join(self.music_dir, "g", "h", "i.mp3")
+ ),
+ )
# Emit item_moved event for an item that is not in a playlist
- results = self.lib.items('path:{}'.format(quote(
- os.path.join(self.music_dir, 'x', 'y', 'z.mp3'))))
+ results = self.lib.items(
+ "path:{}".format(
+ quote(os.path.join(self.music_dir, "x", "y", "z.mp3"))
+ )
+ )
item = results[0]
beets.plugins.send(
- 'item_moved', item=item, source=item.path,
+ "item_moved",
+ item=item,
+ source=item.path,
destination=beets.util.bytestring_path(
- os.path.join(self.music_dir, 'u', 'v', 'w.mp3')))
+ os.path.join(self.music_dir, "u", "v", "w.mp3")
+ ),
+ )
# Emit cli_exit event
- beets.plugins.send('cli_exit', lib=self.lib)
+ beets.plugins.send("cli_exit", lib=self.lib)
# Check playlist with absolute paths
- playlist_path = os.path.join(self.playlist_dir, 'absolute.m3u')
+ playlist_path = os.path.join(self.playlist_dir, "absolute.m3u")
with open(playlist_path) as f:
lines = [line.strip() for line in f.readlines()]
- self.assertEqual(lines, [
- os.path.join(self.music_dir, 'a', 'b', 'c.mp3'),
- os.path.join(self.music_dir, 'g', 'h', 'i.mp3'),
- os.path.join(self.music_dir, 'nonexisting.mp3'),
- ])
+ self.assertEqual(
+ lines,
+ [
+ os.path.join(self.music_dir, "a", "b", "c.mp3"),
+ os.path.join(self.music_dir, "g", "h", "i.mp3"),
+ os.path.join(self.music_dir, "nonexisting.mp3"),
+ ],
+ )
# Check playlist with relative paths
- playlist_path = os.path.join(self.playlist_dir, 'relative.m3u')
+ playlist_path = os.path.join(self.playlist_dir, "relative.m3u")
with open(playlist_path) as f:
lines = [line.strip() for line in f.readlines()]
- self.assertEqual(lines, [
- os.path.join('a', 'b', 'c.mp3'),
- os.path.join('g', 'h', 'i.mp3'),
- 'nonexisting.mp3',
- ])
+ self.assertEqual(
+ lines,
+ [
+ os.path.join("a", "b", "c.mp3"),
+ os.path.join("g", "h", "i.mp3"),
+ "nonexisting.mp3",
+ ],
+ )
class PlaylistTestItemRemoved(PlaylistUpdateTestHelper, unittest.TestCase):
def test_item_removed(self):
# Emit item_removed event for an item that is in a playlist
- results = self.lib.items('path:{}'.format(quote(
- os.path.join(self.music_dir, 'd', 'e', 'f.mp3'))))
+ results = self.lib.items(
+ "path:{}".format(
+ quote(os.path.join(self.music_dir, "d", "e", "f.mp3"))
+ )
+ )
item = results[0]
- beets.plugins.send('item_removed', item=item)
+ beets.plugins.send("item_removed", item=item)
# Emit item_removed event for an item that is not in a playlist
- results = self.lib.items('path:{}'.format(quote(
- os.path.join(self.music_dir, 'x', 'y', 'z.mp3'))))
+ results = self.lib.items(
+ "path:{}".format(
+ quote(os.path.join(self.music_dir, "x", "y", "z.mp3"))
+ )
+ )
item = results[0]
- beets.plugins.send('item_removed', item=item)
+ beets.plugins.send("item_removed", item=item)
# Emit cli_exit event
- beets.plugins.send('cli_exit', lib=self.lib)
+ beets.plugins.send("cli_exit", lib=self.lib)
# Check playlist with absolute paths
- playlist_path = os.path.join(self.playlist_dir, 'absolute.m3u')
+ playlist_path = os.path.join(self.playlist_dir, "absolute.m3u")
with open(playlist_path) as f:
lines = [line.strip() for line in f.readlines()]
- self.assertEqual(lines, [
- os.path.join(self.music_dir, 'a', 'b', 'c.mp3'),
- os.path.join(self.music_dir, 'nonexisting.mp3'),
- ])
+ self.assertEqual(
+ lines,
+ [
+ os.path.join(self.music_dir, "a", "b", "c.mp3"),
+ os.path.join(self.music_dir, "nonexisting.mp3"),
+ ],
+ )
# Check playlist with relative paths
- playlist_path = os.path.join(self.playlist_dir, 'relative.m3u')
+ playlist_path = os.path.join(self.playlist_dir, "relative.m3u")
with open(playlist_path) as f:
lines = [line.strip() for line in f.readlines()]
- self.assertEqual(lines, [
- os.path.join('a', 'b', 'c.mp3'),
- 'nonexisting.mp3',
- ])
+ self.assertEqual(
+ lines,
+ [
+ os.path.join("a", "b", "c.mp3"),
+ "nonexisting.mp3",
+ ],
+ )
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/plugins/test_plexupdate.py b/test/plugins/test_plexupdate.py
index 2d725a4f68..2571415e70 100644
--- a/test/plugins/test_plexupdate.py
+++ b/test/plugins/test_plexupdate.py
@@ -1,13 +1,14 @@
-from test.helper import TestHelper
-from beetsplug.plexupdate import get_music_section, update_plex
import unittest
+from test.helper import TestHelper
+
import responses
+from beetsplug.plexupdate import get_music_section, update_plex
+
class PlexUpdateTest(unittest.TestCase, TestHelper):
- def add_response_get_music_section(self, section_name='Music'):
- """Create response for mocking the get_music_section function.
- """
+ def add_response_get_music_section(self, section_name="Music"):
+ """Create response for mocking the get_music_section function."""
escaped_section_name = section_name.replace('"', '\\"')
@@ -25,7 +26,7 @@ def add_response_get_music_section(self, section_name='Music'):
'language="de" uuid="92f68526-21eb-4ee2-8e22-d36355a17f1f" '
'updatedAt="1416232668" createdAt="1415720680">'
''
- ''
+ ""
''
''
- ''
+ ""
''
''
- ''
- '')
+ ""
+ ""
+ )
status = 200
- content_type = 'text/xml;charset=utf-8'
+ content_type = "text/xml;charset=utf-8"
- responses.add(responses.GET,
- 'http://localhost:32400/library/sections',
- body=body,
- status=status,
- content_type=content_type)
+ responses.add(
+ responses.GET,
+ "http://localhost:32400/library/sections",
+ body=body,
+ status=status,
+ content_type=content_type,
+ )
def add_response_update_plex(self):
- """Create response for mocking the update_plex function.
- """
- body = ''
+ """Create response for mocking the update_plex function."""
+ body = ""
status = 200
- content_type = 'text/html'
+ content_type = "text/html"
- responses.add(responses.GET,
- 'http://localhost:32400/library/sections/2/refresh',
- body=body,
- status=status,
- content_type=content_type)
+ responses.add(
+ responses.GET,
+ "http://localhost:32400/library/sections/2/refresh",
+ body=body,
+ status=status,
+ content_type=content_type,
+ )
def setUp(self):
self.setup_beets()
- self.load_plugins('plexupdate')
+ self.load_plugins("plexupdate")
- self.config['plex'] = {
- 'host': 'localhost',
- 'port': 32400}
+ self.config["plex"] = {"host": "localhost", "port": 32400}
def tearDown(self):
self.teardown_beets()
@@ -85,26 +88,34 @@ def test_get_music_section(self):
self.add_response_get_music_section()
# Test if section key is "2" out of the mocking data.
- self.assertEqual(get_music_section(
- self.config['plex']['host'],
- self.config['plex']['port'],
- self.config['plex']['token'],
- self.config['plex']['library_name'].get(),
- self.config['plex']['secure'],
- self.config['plex']['ignore_cert_errors']), '2')
+ self.assertEqual(
+ get_music_section(
+ self.config["plex"]["host"],
+ self.config["plex"]["port"],
+ self.config["plex"]["token"],
+ self.config["plex"]["library_name"].get(),
+ self.config["plex"]["secure"],
+ self.config["plex"]["ignore_cert_errors"],
+ ),
+ "2",
+ )
@responses.activate
def test_get_named_music_section(self):
# Adding response.
- self.add_response_get_music_section('My Music Library')
-
- self.assertEqual(get_music_section(
- self.config['plex']['host'],
- self.config['plex']['port'],
- self.config['plex']['token'],
- 'My Music Library',
- self.config['plex']['secure'],
- self.config['plex']['ignore_cert_errors']), '2')
+ self.add_response_get_music_section("My Music Library")
+
+ self.assertEqual(
+ get_music_section(
+ self.config["plex"]["host"],
+ self.config["plex"]["port"],
+ self.config["plex"]["token"],
+ "My Music Library",
+ self.config["plex"]["secure"],
+ self.config["plex"]["ignore_cert_errors"],
+ ),
+ "2",
+ )
@responses.activate
def test_update_plex(self):
@@ -113,18 +124,22 @@ def test_update_plex(self):
self.add_response_update_plex()
# Testing status code of the mocking request.
- self.assertEqual(update_plex(
- self.config['plex']['host'],
- self.config['plex']['port'],
- self.config['plex']['token'],
- self.config['plex']['library_name'].get(),
- self.config['plex']['secure'],
- self.config['plex']['ignore_cert_errors']).status_code, 200)
+ self.assertEqual(
+ update_plex(
+ self.config["plex"]["host"],
+ self.config["plex"]["port"],
+ self.config["plex"]["token"],
+ self.config["plex"]["library_name"].get(),
+ self.config["plex"]["secure"],
+ self.config["plex"]["ignore_cert_errors"],
+ ).status_code,
+ 200,
+ )
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/plugins/test_plugin_mediafield.py b/test/plugins/test_plugin_mediafield.py
index 475d3a3779..004b3eaab2 100644
--- a/test/plugins/test_plugin_mediafield.py
+++ b/test/plugins/test_plugin_mediafield.py
@@ -18,33 +18,32 @@
import os
import shutil
import unittest
-
from test import _common
-from beets.library import Item
+
import mediafile
+
+from beets.library import Item
from beets.plugins import BeetsPlugin
from beets.util import bytestring_path, syspath
-
field_extension = mediafile.MediaField(
- mediafile.MP3DescStorageStyle('customtag'),
- mediafile.MP4StorageStyle('----:com.apple.iTunes:customtag'),
- mediafile.StorageStyle('customtag'),
- mediafile.ASFStorageStyle('customtag'),
+ mediafile.MP3DescStorageStyle("customtag"),
+ mediafile.MP4StorageStyle("----:com.apple.iTunes:customtag"),
+ mediafile.StorageStyle("customtag"),
+ mediafile.ASFStorageStyle("customtag"),
)
list_field_extension = mediafile.ListMediaField(
- mediafile.MP3ListDescStorageStyle('customlisttag'),
- mediafile.MP4ListStorageStyle('----:com.apple.iTunes:customlisttag'),
- mediafile.ListStorageStyle('customlisttag'),
- mediafile.ASFStorageStyle('customlisttag'),
+ mediafile.MP3ListDescStorageStyle("customlisttag"),
+ mediafile.MP4ListStorageStyle("----:com.apple.iTunes:customlisttag"),
+ mediafile.ListStorageStyle("customlisttag"),
+ mediafile.ASFStorageStyle("customlisttag"),
)
class ExtendedFieldTestMixin(_common.TestCase):
-
- def _mediafile_fixture(self, name, extension='mp3'):
- name = bytestring_path(name + '.' + extension)
+ def _mediafile_fixture(self, name, extension="mp3"):
+ name = bytestring_path(name + "." + extension)
src = os.path.join(_common.RSRC, name)
target = os.path.join(self.temp_dir, name)
shutil.copy(syspath(src), syspath(target))
@@ -52,84 +51,83 @@ def _mediafile_fixture(self, name, extension='mp3'):
def test_extended_field_write(self):
plugin = BeetsPlugin()
- plugin.add_media_field('customtag', field_extension)
+ plugin.add_media_field("customtag", field_extension)
try:
- mf = self._mediafile_fixture('empty')
- mf.customtag = 'F#'
+ mf = self._mediafile_fixture("empty")
+ mf.customtag = "F#"
mf.save()
mf = mediafile.MediaFile(mf.path)
- self.assertEqual(mf.customtag, 'F#')
+ self.assertEqual(mf.customtag, "F#")
finally:
- delattr(mediafile.MediaFile, 'customtag')
- Item._media_fields.remove('customtag')
+ delattr(mediafile.MediaFile, "customtag")
+ Item._media_fields.remove("customtag")
def test_extended_list_field_write(self):
plugin = BeetsPlugin()
- plugin.add_media_field('customlisttag', list_field_extension)
+ plugin.add_media_field("customlisttag", list_field_extension)
try:
- mf = self._mediafile_fixture('empty')
- mf.customlisttag = ['a', 'b']
+ mf = self._mediafile_fixture("empty")
+ mf.customlisttag = ["a", "b"]
mf.save()
mf = mediafile.MediaFile(mf.path)
- self.assertEqual(mf.customlisttag, ['a', 'b'])
+ self.assertEqual(mf.customlisttag, ["a", "b"])
finally:
- delattr(mediafile.MediaFile, 'customlisttag')
- Item._media_fields.remove('customlisttag')
+ delattr(mediafile.MediaFile, "customlisttag")
+ Item._media_fields.remove("customlisttag")
def test_write_extended_tag_from_item(self):
plugin = BeetsPlugin()
- plugin.add_media_field('customtag', field_extension)
+ plugin.add_media_field("customtag", field_extension)
try:
- mf = self._mediafile_fixture('empty')
+ mf = self._mediafile_fixture("empty")
self.assertIsNone(mf.customtag)
- item = Item(path=mf.path, customtag='Gb')
+ item = Item(path=mf.path, customtag="Gb")
item.write()
mf = mediafile.MediaFile(mf.path)
- self.assertEqual(mf.customtag, 'Gb')
+ self.assertEqual(mf.customtag, "Gb")
finally:
- delattr(mediafile.MediaFile, 'customtag')
- Item._media_fields.remove('customtag')
+ delattr(mediafile.MediaFile, "customtag")
+ Item._media_fields.remove("customtag")
def test_read_flexible_attribute_from_file(self):
plugin = BeetsPlugin()
- plugin.add_media_field('customtag', field_extension)
+ plugin.add_media_field("customtag", field_extension)
try:
- mf = self._mediafile_fixture('empty')
- mf.update({'customtag': 'F#'})
+ mf = self._mediafile_fixture("empty")
+ mf.update({"customtag": "F#"})
mf.save()
item = Item.from_path(mf.path)
- self.assertEqual(item['customtag'], 'F#')
+ self.assertEqual(item["customtag"], "F#")
finally:
- delattr(mediafile.MediaFile, 'customtag')
- Item._media_fields.remove('customtag')
+ delattr(mediafile.MediaFile, "customtag")
+ Item._media_fields.remove("customtag")
def test_invalid_descriptor(self):
with self.assertRaises(ValueError) as cm:
- mediafile.MediaFile.add_field('somekey', True)
- self.assertIn('must be an instance of MediaField',
- str(cm.exception))
+ mediafile.MediaFile.add_field("somekey", True)
+ self.assertIn("must be an instance of MediaField", str(cm.exception))
def test_overwrite_property(self):
with self.assertRaises(ValueError) as cm:
- mediafile.MediaFile.add_field('artist', mediafile.MediaField())
- self.assertIn('property "artist" already exists',
- str(cm.exception))
+ mediafile.MediaFile.add_field("artist", mediafile.MediaField())
+ self.assertIn('property "artist" already exists', str(cm.exception))
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/plugins/test_random.py b/test/plugins/test_random.py
index 4c63f45871..dc92177458 100644
--- a/test/plugins/test_random.py
+++ b/test/plugins/test_random.py
@@ -16,11 +16,10 @@
"""
-import unittest
-from test.helper import TestHelper
-
import math
+import unittest
from random import Random
+from test.helper import TestHelper
from beets import random
@@ -28,8 +27,8 @@
class RandomTest(unittest.TestCase, TestHelper):
def setUp(self):
self.lib = None
- self.artist1 = 'Artist 1'
- self.artist2 = 'Artist 2'
+ self.artist1 = "Artist 1"
+ self.artist2 = "Artist 2"
self.item1 = self.create_item(artist=self.artist1)
self.item2 = self.create_item(artist=self.artist2)
self.items = [self.item1, self.item2]
@@ -43,13 +42,12 @@ def tearDown(self):
def _stats(self, data):
mean = sum(data) / len(data)
- stdev = math.sqrt(
- sum((p - mean) ** 2 for p in data) / (len(data) - 1))
+ stdev = math.sqrt(sum((p - mean) ** 2 for p in data) / (len(data) - 1))
quot, rem = divmod(len(data), 2)
if rem:
median = sorted(data)[quot]
else:
- median = sum(sorted(data)[quot - 1:quot + 1]) / 2
+ median = sum(sorted(data)[quot - 1 : quot + 1]) / 2
return mean, stdev, median
def test_equal_permutation(self):
@@ -58,23 +56,27 @@ def test_equal_permutation(self):
the solo track will almost always end up near the start. If we use a
different field then it'll be in the middle on average.
"""
+
def experiment(field, histogram=False):
"""Permutes the list of items 500 times and calculates the position
of self.item1 each time. Returns stats about that position.
"""
positions = []
for _ in range(500):
- shuffled = list(random._equal_chance_permutation(
- self.items, field=field, random_gen=self.random_gen))
+ shuffled = list(
+ random._equal_chance_permutation(
+ self.items, field=field, random_gen=self.random_gen
+ )
+ )
positions.append(shuffled.index(self.item1))
# Print a histogram (useful for debugging).
if histogram:
for i in range(len(self.items)):
- print('{:2d} {}'.format(i, '*' * positions.count(i)))
+ print("{:2d} {}".format(i, "*" * positions.count(i)))
return self._stats(positions)
- mean1, stdev1, median1 = experiment('artist')
- mean2, stdev2, median2 = experiment('track')
+ mean1, stdev1, median1 = experiment("artist")
+ mean2, stdev2, median2 = experiment("track")
self.assertAlmostEqual(0, median1, delta=1)
self.assertAlmostEqual(len(self.items) // 2, median2, delta=1)
self.assertGreater(stdev2, stdev1)
@@ -83,5 +85,6 @@ def experiment(field, histogram=False):
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/plugins/test_replaygain.py b/test/plugins/test_replaygain.py
index 554fe98e3c..e80c79bc61 100644
--- a/test/plugins/test_replaygain.py
+++ b/test/plugins/test_replaygain.py
@@ -14,73 +14,74 @@
import unittest
+from test.helper import TestHelper, has_program
+
from mediafile import MediaFile
from beets import config
-from beetsplug.replaygain import (FatalGstreamerPluginReplayGainError,
- GStreamerBackend)
-from test.helper import TestHelper, has_program
+from beetsplug.replaygain import (
+ FatalGstreamerPluginReplayGainError,
+ GStreamerBackend,
+)
try:
import gi
- gi.require_version('Gst', '1.0')
+
+ gi.require_version("Gst", "1.0")
GST_AVAILABLE = True
except (ImportError, ValueError):
GST_AVAILABLE = False
-if any(has_program(cmd, ['-v']) for cmd in ['mp3gain', 'aacgain']):
+if any(has_program(cmd, ["-v"]) for cmd in ["mp3gain", "aacgain"]):
GAIN_PROG_AVAILABLE = True
else:
GAIN_PROG_AVAILABLE = False
-FFMPEG_AVAILABLE = has_program('ffmpeg', ['-version'])
+FFMPEG_AVAILABLE = has_program("ffmpeg", ["-version"])
def reset_replaygain(item):
- item['rg_track_peak'] = None
- item['rg_track_gain'] = None
- item['rg_album_gain'] = None
- item['rg_album_gain'] = None
- item['r128_track_gain'] = None
- item['r128_album_gain'] = None
+ item["rg_track_peak"] = None
+ item["rg_track_gain"] = None
+ item["rg_album_gain"] = None
+ item["rg_album_gain"] = None
+ item["r128_track_gain"] = None
+ item["r128_album_gain"] = None
item.write()
item.store()
-class GstBackendMixin():
- backend = 'gstreamer'
+class GstBackendMixin:
+ backend = "gstreamer"
has_r128_support = True
def test_backend(self):
- """Check whether the backend actually has all required functionality.
- """
+ """Check whether the backend actually has all required functionality."""
try:
# Check if required plugins can be loaded by instantiating a
# GStreamerBackend (via its .__init__).
- config['replaygain']['targetlevel'] = 89
- GStreamerBackend(config['replaygain'], None)
+ config["replaygain"]["targetlevel"] = 89
+ GStreamerBackend(config["replaygain"], None)
except FatalGstreamerPluginReplayGainError as e:
# Skip the test if plugins could not be loaded.
self.skipTest(str(e))
-class CmdBackendMixin():
- backend = 'command'
+class CmdBackendMixin:
+ backend = "command"
has_r128_support = False
def test_backend(self):
- """Check whether the backend actually has all required functionality.
- """
+ """Check whether the backend actually has all required functionality."""
pass
-class FfmpegBackendMixin():
- backend = 'ffmpeg'
+class FfmpegBackendMixin:
+ backend = "ffmpeg"
has_r128_support = True
def test_backend(self):
- """Check whether the backend actually has all required functionality.
- """
+ """Check whether the backend actually has all required functionality."""
pass
@@ -92,10 +93,10 @@ def setUp(self):
self.test_backend()
self.setup_beets(disk=True)
- self.config['replaygain']['backend'] = self.backend
+ self.config["replaygain"]["backend"] = self.backend
try:
- self.load_plugins('replaygain')
+ self.load_plugins("replaygain")
except Exception:
self.teardown_beets()
self.unload_plugins()
@@ -122,22 +123,26 @@ def test_cli_saves_track_gain(self):
self.assertIsNone(mediafile.rg_track_peak)
self.assertIsNone(mediafile.rg_track_gain)
- self.run_command('replaygain')
+ self.run_command("replaygain")
# Skip the test if rg_track_peak and rg_track gain is None, assuming
# that it could only happen if the decoder plugins are missing.
- if all(i.rg_track_peak is None and i.rg_track_gain is None
- for i in self.lib.items()):
- self.skipTest('decoder plugins could not be loaded.')
+ if all(
+ i.rg_track_peak is None and i.rg_track_gain is None
+ for i in self.lib.items()
+ ):
+ self.skipTest("decoder plugins could not be loaded.")
for item in self.lib.items():
self.assertIsNotNone(item.rg_track_peak)
self.assertIsNotNone(item.rg_track_gain)
mediafile = MediaFile(item.path)
self.assertAlmostEqual(
- mediafile.rg_track_peak, item.rg_track_peak, places=6)
+ mediafile.rg_track_peak, item.rg_track_peak, places=6
+ )
self.assertAlmostEqual(
- mediafile.rg_track_gain, item.rg_track_gain, places=2)
+ mediafile.rg_track_gain, item.rg_track_gain, places=2
+ )
def test_cli_skips_calculated_tracks(self):
album_rg = self._add_album(1)
@@ -147,7 +152,7 @@ def test_cli_skips_calculated_tracks(self):
album_r128 = self._add_album(1, ext="opus")
item_r128 = album_r128.items()[0]
- self.run_command('replaygain')
+ self.run_command("replaygain")
item_rg.load()
self.assertIsNotNone(item_rg.rg_track_gain)
@@ -170,7 +175,7 @@ def test_cli_skips_calculated_tracks(self):
item_r128.store()
r128_track_gain = item_r128.r128_track_gain
- self.run_command('replaygain')
+ self.run_command("replaygain")
item_rg.load()
self.assertEqual(item_rg.rg_track_gain, rg_track_gain)
@@ -181,13 +186,15 @@ def test_cli_skips_calculated_tracks(self):
self.assertEqual(item_r128.r128_track_gain, r128_track_gain)
def test_cli_does_not_skip_wrong_tag_type(self):
- """Check that items that have tags of the wrong type won't be skipped.
- """
+ """Check that items that have tags of the wrong type won't be skipped."""
if not self.has_r128_support:
# This test is a lot less interesting if the backend cannot write
# both tag types.
- self.skipTest("r128 tags for opus not supported on backend {}"
- .format(self.backend))
+ self.skipTest(
+ "r128 tags for opus not supported on backend {}".format(
+ self.backend
+ )
+ )
album_rg = self._add_album(1)
item_rg = album_rg.items()[0]
@@ -202,7 +209,7 @@ def test_cli_does_not_skip_wrong_tag_type(self):
item_r128.rg_track_peak = 42.0
item_r128.store()
- self.run_command('replaygain')
+ self.run_command("replaygain")
item_rg.load()
item_r128.load()
@@ -224,7 +231,7 @@ def test_cli_saves_album_gain_to_file(self):
self.assertIsNone(mediafile.rg_album_peak)
self.assertIsNone(mediafile.rg_album_gain)
- self.run_command('replaygain', '-a')
+ self.run_command("replaygain", "-a")
peaks = []
gains = []
@@ -242,12 +249,15 @@ def test_cli_saves_album_gain_to_file(self):
def test_cli_writes_only_r128_tags(self):
if not self.has_r128_support:
- self.skipTest("r128 tags for opus not supported on backend {}"
- .format(self.backend))
+ self.skipTest(
+ "r128 tags for opus not supported on backend {}".format(
+ self.backend
+ )
+ )
album = self._add_album(2, ext="opus")
- self.run_command('replaygain', '-a')
+ self.run_command("replaygain", "-a")
for item in album.items():
mediafile = MediaFile(item.path)
@@ -263,8 +273,8 @@ def test_targetlevel_has_effect(self):
item = album.items()[0]
def analyse(target_level):
- self.config['replaygain']['targetlevel'] = target_level
- self.run_command('replaygain', '-f')
+ self.config["replaygain"]["targetlevel"] = target_level
+ self.run_command("replaygain", "-f")
item.load()
return item.rg_track_gain
@@ -275,15 +285,18 @@ def analyse(target_level):
def test_r128_targetlevel_has_effect(self):
if not self.has_r128_support:
- self.skipTest("r128 tags for opus not supported on backend {}"
- .format(self.backend))
+ self.skipTest(
+ "r128 tags for opus not supported on backend {}".format(
+ self.backend
+ )
+ )
album = self._add_album(1, ext="opus")
item = album.items()[0]
def analyse(target_level):
- self.config['replaygain']['r128_targetlevel'] = target_level
- self.run_command('replaygain', '-f')
+ self.config["replaygain"]["r128_targetlevel"] = target_level
+ self.run_command("replaygain", "-f")
item.load()
return item.r128_track_gain
@@ -295,8 +308,8 @@ def analyse(target_level):
def test_per_disc(self):
# Use the per_disc option and add a little more concurrency.
album = self._add_album(track_count=4, disc_count=3)
- self.config['replaygain']['per_disc'] = True
- self.run_command('replaygain', '-a')
+ self.config["replaygain"]["per_disc"] = True
+ self.run_command("replaygain", "-a")
# FIXME: Add fixtures with known track/album gain (within a suitable
# tolerance) so that we can actually check per-disc operation here.
@@ -305,27 +318,31 @@ def test_per_disc(self):
self.assertIsNotNone(item.rg_album_gain)
-@unittest.skipIf(not GST_AVAILABLE, 'gstreamer cannot be found')
-class ReplayGainGstCliTest(ReplayGainCliTestBase, unittest.TestCase,
- GstBackendMixin):
+@unittest.skipIf(not GST_AVAILABLE, "gstreamer cannot be found")
+class ReplayGainGstCliTest(
+ ReplayGainCliTestBase, unittest.TestCase, GstBackendMixin
+):
FNAME = "full" # file contains only silence
-@unittest.skipIf(not GAIN_PROG_AVAILABLE, 'no *gain command found')
-class ReplayGainCmdCliTest(ReplayGainCliTestBase, unittest.TestCase,
- CmdBackendMixin):
+@unittest.skipIf(not GAIN_PROG_AVAILABLE, "no *gain command found")
+class ReplayGainCmdCliTest(
+ ReplayGainCliTestBase, unittest.TestCase, CmdBackendMixin
+):
FNAME = "full" # file contains only silence
-@unittest.skipIf(not FFMPEG_AVAILABLE, 'ffmpeg cannot be found')
-class ReplayGainFfmpegCliTest(ReplayGainCliTestBase, unittest.TestCase,
- FfmpegBackendMixin):
+@unittest.skipIf(not FFMPEG_AVAILABLE, "ffmpeg cannot be found")
+class ReplayGainFfmpegCliTest(
+ ReplayGainCliTestBase, unittest.TestCase, FfmpegBackendMixin
+):
FNAME = "full" # file contains only silence
-@unittest.skipIf(not FFMPEG_AVAILABLE, 'ffmpeg cannot be found')
-class ReplayGainFfmpegNoiseCliTest(ReplayGainCliTestBase, unittest.TestCase,
- FfmpegBackendMixin):
+@unittest.skipIf(not FFMPEG_AVAILABLE, "ffmpeg cannot be found")
+class ReplayGainFfmpegNoiseCliTest(
+ ReplayGainCliTestBase, unittest.TestCase, FfmpegBackendMixin
+):
FNAME = "whitenoise"
@@ -337,11 +354,11 @@ def setUp(self):
self.test_backend()
self.setup_beets(disk=True)
- self.config['threaded'] = self.threaded
- self.config['replaygain']['backend'] = self.backend
+ self.config["threaded"] = self.threaded
+ self.config["replaygain"]["backend"] = self.backend
try:
- self.load_plugins('replaygain')
+ self.load_plugins("replaygain")
except Exception:
self.teardown_beets()
self.unload_plugins()
@@ -362,27 +379,27 @@ def test_import_converted(self):
self.assertIsNotNone(item.rg_album_gain)
-@unittest.skipIf(not GST_AVAILABLE, 'gstreamer cannot be found')
-class ReplayGainGstImportTest(ImportTest, unittest.TestCase,
- GstBackendMixin):
+@unittest.skipIf(not GST_AVAILABLE, "gstreamer cannot be found")
+class ReplayGainGstImportTest(ImportTest, unittest.TestCase, GstBackendMixin):
pass
-@unittest.skipIf(not GAIN_PROG_AVAILABLE, 'no *gain command found')
-class ReplayGainCmdImportTest(ImportTest, unittest.TestCase,
- CmdBackendMixin):
+@unittest.skipIf(not GAIN_PROG_AVAILABLE, "no *gain command found")
+class ReplayGainCmdImportTest(ImportTest, unittest.TestCase, CmdBackendMixin):
pass
-@unittest.skipIf(not FFMPEG_AVAILABLE, 'ffmpeg cannot be found')
-class ReplayGainFfmpegImportTest(ImportTest, unittest.TestCase,
- FfmpegBackendMixin):
+@unittest.skipIf(not FFMPEG_AVAILABLE, "ffmpeg cannot be found")
+class ReplayGainFfmpegImportTest(
+ ImportTest, unittest.TestCase, FfmpegBackendMixin
+):
pass
-@unittest.skipIf(not FFMPEG_AVAILABLE, 'ffmpeg cannot be found')
-class ReplayGainFfmpegThreadedImportTest(ImportTest, unittest.TestCase,
- FfmpegBackendMixin):
+@unittest.skipIf(not FFMPEG_AVAILABLE, "ffmpeg cannot be found")
+class ReplayGainFfmpegThreadedImportTest(
+ ImportTest, unittest.TestCase, FfmpegBackendMixin
+):
threaded = True
@@ -390,5 +407,5 @@ def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/plugins/test_smartplaylist.py b/test/plugins/test_smartplaylist.py
index 4f254bec54..a3a03b54ca 100644
--- a/test/plugins/test_smartplaylist.py
+++ b/test/plugins/test_smartplaylist.py
@@ -13,23 +13,21 @@
# included in all copies or substantial portions of the Software.
+import unittest
from os import path, remove
-from tempfile import mkdtemp
from shutil import rmtree
-import unittest
-
-from unittest.mock import Mock, MagicMock
+from tempfile import mkdtemp
+from test import _common
+from test.helper import TestHelper
+from unittest.mock import MagicMock, Mock
-from beetsplug.smartplaylist import SmartPlaylistPlugin
-from beets.library import Item, Album, parse_query_string
+from beets import config
from beets.dbcore import OrQuery
-from beets.dbcore.query import NullSort, MultipleSort, FixedFieldSort
-from beets.util import syspath, bytestring_path, py3_path, CHAR_REPLACE
+from beets.dbcore.query import FixedFieldSort, MultipleSort, NullSort
+from beets.library import Album, Item, parse_query_string
from beets.ui import UserError
-from beets import config
-
-from test import _common
-from test.helper import TestHelper
+from beets.util import CHAR_REPLACE, bytestring_path, py3_path, syspath
+from beetsplug.smartplaylist import SmartPlaylistPlugin
class SmartPlaylistTest(_common.TestCase):
@@ -38,64 +36,74 @@ def test_build_queries(self):
self.assertEqual(spl._matched_playlists, None)
self.assertEqual(spl._unmatched_playlists, None)
- config['smartplaylist']['playlists'].set([])
+ config["smartplaylist"]["playlists"].set([])
spl.build_queries()
self.assertEqual(spl._matched_playlists, set())
self.assertEqual(spl._unmatched_playlists, set())
- config['smartplaylist']['playlists'].set([
- {'name': 'foo',
- 'query': 'FOO foo'},
- {'name': 'bar',
- 'album_query': ['BAR bar1', 'BAR bar2']},
- {'name': 'baz',
- 'query': 'BAZ baz',
- 'album_query': 'BAZ baz'}
- ])
+ config["smartplaylist"]["playlists"].set(
+ [
+ {"name": "foo", "query": "FOO foo"},
+ {"name": "bar", "album_query": ["BAR bar1", "BAR bar2"]},
+ {"name": "baz", "query": "BAZ baz", "album_query": "BAZ baz"},
+ ]
+ )
spl.build_queries()
self.assertEqual(spl._matched_playlists, set())
- foo_foo = parse_query_string('FOO foo', Item)
- baz_baz = parse_query_string('BAZ baz', Item)
- baz_baz2 = parse_query_string('BAZ baz', Album)
- bar_bar = OrQuery((parse_query_string('BAR bar1', Album)[0],
- parse_query_string('BAR bar2', Album)[0]))
- self.assertEqual(spl._unmatched_playlists, {
- ('foo', foo_foo, (None, None)),
- ('baz', baz_baz, baz_baz2),
- ('bar', (None, None), (bar_bar, None)),
- })
+ foo_foo = parse_query_string("FOO foo", Item)
+ baz_baz = parse_query_string("BAZ baz", Item)
+ baz_baz2 = parse_query_string("BAZ baz", Album)
+ bar_bar = OrQuery(
+ (
+ parse_query_string("BAR bar1", Album)[0],
+ parse_query_string("BAR bar2", Album)[0],
+ )
+ )
+ self.assertEqual(
+ spl._unmatched_playlists,
+ {
+ ("foo", foo_foo, (None, None)),
+ ("baz", baz_baz, baz_baz2),
+ ("bar", (None, None), (bar_bar, None)),
+ },
+ )
def test_build_queries_with_sorts(self):
spl = SmartPlaylistPlugin()
- config['smartplaylist']['playlists'].set([
- {'name': 'no_sort',
- 'query': 'foo'},
- {'name': 'one_sort',
- 'query': 'foo year+'},
- {'name': 'only_empty_sorts',
- 'query': ['foo', 'bar']},
- {'name': 'one_non_empty_sort',
- 'query': ['foo year+', 'bar']},
- {'name': 'multiple_sorts',
- 'query': ['foo year+', 'bar genre-']},
- {'name': 'mixed',
- 'query': ['foo year+', 'bar', 'baz genre+ id-']}
- ])
+ config["smartplaylist"]["playlists"].set(
+ [
+ {"name": "no_sort", "query": "foo"},
+ {"name": "one_sort", "query": "foo year+"},
+ {"name": "only_empty_sorts", "query": ["foo", "bar"]},
+ {"name": "one_non_empty_sort", "query": ["foo year+", "bar"]},
+ {
+ "name": "multiple_sorts",
+ "query": ["foo year+", "bar genre-"],
+ },
+ {
+ "name": "mixed",
+ "query": ["foo year+", "bar", "baz genre+ id-"],
+ },
+ ]
+ )
spl.build_queries()
- sorts = {name: sort
- for name, (_, sort), _ in spl._unmatched_playlists}
+ sorts = {name: sort for name, (_, sort), _ in spl._unmatched_playlists}
asseq = self.assertEqual # less cluttered code
sort = FixedFieldSort # short cut since we're only dealing with this
asseq(sorts["no_sort"], NullSort())
- asseq(sorts["one_sort"], sort('year'))
+ asseq(sorts["one_sort"], sort("year"))
asseq(sorts["only_empty_sorts"], None)
- asseq(sorts["one_non_empty_sort"], sort('year'))
- asseq(sorts["multiple_sorts"],
- MultipleSort([sort('year'), sort('genre', False)]))
- asseq(sorts["mixed"],
- MultipleSort([sort('year'), sort('genre'), sort('id', False)]))
+ asseq(sorts["one_non_empty_sort"], sort("year"))
+ asseq(
+ sorts["multiple_sorts"],
+ MultipleSort([sort("year"), sort("genre", False)]),
+ )
+ asseq(
+ sorts["mixed"],
+ MultipleSort([sort("year"), sort("genre"), sort("id", False)]),
+ )
def test_matches(self):
spl = SmartPlaylistPlugin()
@@ -123,9 +131,9 @@ def test_db_changes(self):
spl = SmartPlaylistPlugin()
nones = None, None
- pl1 = '1', ('q1', None), nones
- pl2 = '2', ('q2', None), nones
- pl3 = '3', ('q3', None), nones
+ pl1 = "1", ("q1", None), nones
+ pl2 = "2", ("q2", None), nones
+ pl3 = "3", ("q3", None), nones
spl._unmatched_playlists = {pl1, pl2, pl3}
spl._matched_playlists = set()
@@ -135,12 +143,12 @@ def test_db_changes(self):
self.assertEqual(spl._unmatched_playlists, {pl1, pl2, pl3})
self.assertEqual(spl._matched_playlists, set())
- spl.matches.side_effect = lambda _, q, __: q == 'q3'
+ spl.matches.side_effect = lambda _, q, __: q == "q3"
spl.db_change(None, "matches 3")
self.assertEqual(spl._unmatched_playlists, {pl1, pl2})
self.assertEqual(spl._matched_playlists, {pl3})
- spl.matches.side_effect = lambda _, q, __: q == 'q1'
+ spl.matches.side_effect = lambda _, q, __: q == "q1"
spl.db_change(None, "matches 3")
self.assertEqual(spl._matched_playlists, {pl1, pl3})
self.assertEqual(spl._unmatched_playlists, {pl2})
@@ -148,9 +156,10 @@ def test_db_changes(self):
def test_playlist_update(self):
spl = SmartPlaylistPlugin()
- i = Mock(path=b'/tagada.mp3')
- i.evaluate_template.side_effect = \
- lambda pl, _: pl.replace(b'$title', b'ta:ga:da').decode()
+ i = Mock(path=b"/tagada.mp3")
+ i.evaluate_template.side_effect = lambda pl, _: pl.replace(
+ b"$title", b"ta:ga:da"
+ ).decode()
lib = Mock()
lib.replacements = CHAR_REPLACE
@@ -159,12 +168,12 @@ def test_playlist_update(self):
q = Mock()
a_q = Mock()
- pl = b'$title-my.m3u', (q, None), (a_q, None)
+ pl = b"$title-my.m3u", (q, None), (a_q, None)
spl._matched_playlists = [pl]
dir = bytestring_path(mkdtemp())
- config['smartplaylist']['relative_to'] = False
- config['smartplaylist']['playlist_dir'] = py3_path(dir)
+ config["smartplaylist"]["relative_to"] = False
+ config["smartplaylist"]["playlist_dir"] = py3_path(dir)
try:
spl.update_playlists(lib)
except Exception:
@@ -174,13 +183,13 @@ def test_playlist_update(self):
lib.items.assert_called_once_with(q, None)
lib.albums.assert_called_once_with(a_q, None)
- m3u_filepath = path.join(dir, b'ta_ga_da-my_playlist_.m3u')
+ m3u_filepath = path.join(dir, b"ta_ga_da-my_playlist_.m3u")
self.assertExists(m3u_filepath)
- with open(syspath(m3u_filepath), 'rb') as f:
+ with open(syspath(m3u_filepath), "rb") as f:
content = f.read()
rmtree(syspath(dir))
- self.assertEqual(content, b'/tagada.mp3\n')
+ self.assertEqual(content, b"/tagada.mp3\n")
class SmartPlaylistCLITest(_common.TestCase, TestHelper):
@@ -188,14 +197,14 @@ def setUp(self):
self.setup_beets()
self.item = self.add_item()
- config['smartplaylist']['playlists'].set([
- {'name': 'my_playlist.m3u',
- 'query': self.item.title},
- {'name': 'all.m3u',
- 'query': ''}
- ])
- config['smartplaylist']['playlist_dir'].set(py3_path(self.temp_dir))
- self.load_plugins('smartplaylist')
+ config["smartplaylist"]["playlists"].set(
+ [
+ {"name": "my_playlist.m3u", "query": self.item.title},
+ {"name": "all.m3u", "query": ""},
+ ]
+ )
+ config["smartplaylist"]["playlist_dir"].set(py3_path(self.temp_dir))
+ self.load_plugins("smartplaylist")
def tearDown(self):
self.unload_plugins()
@@ -203,23 +212,23 @@ def tearDown(self):
def test_splupdate(self):
with self.assertRaises(UserError):
- self.run_with_output('splupdate', 'tagada')
+ self.run_with_output("splupdate", "tagada")
- self.run_with_output('splupdate', 'my_playlist')
- m3u_path = path.join(self.temp_dir, b'my_playlist.m3u')
+ self.run_with_output("splupdate", "my_playlist")
+ m3u_path = path.join(self.temp_dir, b"my_playlist.m3u")
self.assertExists(m3u_path)
- with open(syspath(m3u_path), 'rb') as f:
+ with open(syspath(m3u_path), "rb") as f:
self.assertEqual(f.read(), self.item.path + b"\n")
remove(syspath(m3u_path))
- self.run_with_output('splupdate', 'my_playlist.m3u')
- with open(syspath(m3u_path), 'rb') as f:
+ self.run_with_output("splupdate", "my_playlist.m3u")
+ with open(syspath(m3u_path), "rb") as f:
self.assertEqual(f.read(), self.item.path + b"\n")
remove(syspath(m3u_path))
- self.run_with_output('splupdate')
- for name in (b'my_playlist.m3u', b'all.m3u'):
- with open(path.join(self.temp_dir, name), 'rb') as f:
+ self.run_with_output("splupdate")
+ for name in (b"my_playlist.m3u", b"all.m3u"):
+ with open(path.join(self.temp_dir, name), "rb") as f:
self.assertEqual(f.read(), self.item.path + b"\n")
@@ -227,5 +236,5 @@ def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/plugins/test_spotify.py b/test/plugins/test_spotify.py
index 76148862de..19d1ca3e79 100644
--- a/test/plugins/test_spotify.py
+++ b/test/plugins/test_spotify.py
@@ -2,15 +2,16 @@
import os
-import responses
import unittest
-
from test import _common
+from test.helper import TestHelper
+from urllib.parse import parse_qs, urlparse
+
+import responses
+
from beets import config
from beets.library import Item
from beetsplug import spotify
-from test.helper import TestHelper
-from urllib.parse import parse_qs, urlparse
class ArgumentsMock:
@@ -35,11 +36,11 @@ def setUp(self):
spotify.SpotifyPlugin.oauth_token_url,
status=200,
json={
- 'access_token': '3XyiC3raJySbIAV5LVYj1DaWbcocNi3LAJTNXRnYY'
- 'GVUl6mbbqXNhW3YcZnQgYXNWHFkVGSMlc0tMuvq8CF',
- 'token_type': 'Bearer',
- 'expires_in': 3600,
- 'scope': '',
+ "access_token": "3XyiC3raJySbIAV5LVYj1DaWbcocNi3LAJTNXRnYY"
+ "GVUl6mbbqXNhW3YcZnQgYXNWHFkVGSMlc0tMuvq8CF",
+ "token_type": "Bearer",
+ "expires_in": 3600,
+ "scope": "",
},
)
self.spotify = spotify.SpotifyPlugin()
@@ -63,9 +64,9 @@ def test_empty_query(self):
@responses.activate
def test_missing_request(self):
json_file = os.path.join(
- _common.RSRC, b'spotify', b'missing_request.json'
+ _common.RSRC, b"spotify", b"missing_request.json"
)
- with open(json_file, 'rb') as f:
+ with open(json_file, "rb") as f:
response_body = f.read()
responses.add(
@@ -73,31 +74,31 @@ def test_missing_request(self):
spotify.SpotifyPlugin.search_url,
body=response_body,
status=200,
- content_type='application/json',
+ content_type="application/json",
)
item = Item(
- mb_trackid='01234',
- album='lkajsdflakjsd',
- albumartist='ujydfsuihse',
- title='duifhjslkef',
+ mb_trackid="01234",
+ album="lkajsdflakjsd",
+ albumartist="ujydfsuihse",
+ title="duifhjslkef",
length=10,
)
item.add(self.lib)
self.assertEqual([], self.spotify._match_library_tracks(self.lib, ""))
params = _params(responses.calls[0].request.url)
- query = params['q'][0]
- self.assertIn('duifhjslkef', query)
- self.assertIn('artist:ujydfsuihse', query)
- self.assertIn('album:lkajsdflakjsd', query)
- self.assertEqual(params['type'], ['track'])
+ query = params["q"][0]
+ self.assertIn("duifhjslkef", query)
+ self.assertIn("artist:ujydfsuihse", query)
+ self.assertIn("album:lkajsdflakjsd", query)
+ self.assertEqual(params["type"], ["track"])
@responses.activate
def test_track_request(self):
json_file = os.path.join(
- _common.RSRC, b'spotify', b'track_request.json'
+ _common.RSRC, b"spotify", b"track_request.json"
)
- with open(json_file, 'rb') as f:
+ with open(json_file, "rb") as f:
response_body = f.read()
responses.add(
@@ -105,67 +106,63 @@ def test_track_request(self):
spotify.SpotifyPlugin.search_url,
body=response_body,
status=200,
- content_type='application/json',
+ content_type="application/json",
)
item = Item(
- mb_trackid='01234',
- album='Despicable Me 2',
- albumartist='Pharrell Williams',
- title='Happy',
+ mb_trackid="01234",
+ album="Despicable Me 2",
+ albumartist="Pharrell Williams",
+ title="Happy",
length=10,
)
item.add(self.lib)
results = self.spotify._match_library_tracks(self.lib, "Happy")
self.assertEqual(1, len(results))
- self.assertEqual("6NPVjNh8Jhru9xOmyQigds", results[0]['id'])
+ self.assertEqual("6NPVjNh8Jhru9xOmyQigds", results[0]["id"])
self.spotify._output_match_results(results)
params = _params(responses.calls[0].request.url)
- query = params['q'][0]
- self.assertIn('Happy', query)
- self.assertIn('artist:Pharrell Williams', query)
- self.assertIn('album:Despicable Me 2', query)
- self.assertEqual(params['type'], ['track'])
+ query = params["q"][0]
+ self.assertIn("Happy", query)
+ self.assertIn("artist:Pharrell Williams", query)
+ self.assertIn("album:Despicable Me 2", query)
+ self.assertEqual(params["type"], ["track"])
@responses.activate
def test_track_for_id(self):
"""Tests if plugin is able to fetch a track by its Spotify ID"""
# Mock the Spotify 'Get Track' call
- json_file = os.path.join(
- _common.RSRC, b'spotify', b'track_info.json'
- )
- with open(json_file, 'rb') as f:
+ json_file = os.path.join(_common.RSRC, b"spotify", b"track_info.json")
+ with open(json_file, "rb") as f:
response_body = f.read()
responses.add(
responses.GET,
- spotify.SpotifyPlugin.track_url + '6NPVjNh8Jhru9xOmyQigds',
+ spotify.SpotifyPlugin.track_url + "6NPVjNh8Jhru9xOmyQigds",
body=response_body,
status=200,
- content_type='application/json',
+ content_type="application/json",
)
# Mock the Spotify 'Get Album' call
- json_file = os.path.join(
- _common.RSRC, b'spotify', b'album_info.json'
- )
- with open(json_file, 'rb') as f:
+ json_file = os.path.join(_common.RSRC, b"spotify", b"album_info.json")
+ with open(json_file, "rb") as f:
response_body = f.read()
responses.add(
responses.GET,
- spotify.SpotifyPlugin.album_url + '5l3zEmMrOhOzG8d8s83GOL',
+ spotify.SpotifyPlugin.album_url + "5l3zEmMrOhOzG8d8s83GOL",
body=response_body,
status=200,
- content_type='application/json',
+ content_type="application/json",
)
# Mock the Spotify 'Search' call
json_file = os.path.join(
- _common.RSRC, b'spotify', b'track_request.json'
+ _common.RSRC, b"spotify", b"track_request.json"
)
- with open(json_file, 'rb') as f:
+ with open(json_file, "rb") as f:
response_body = f.read()
responses.add(
@@ -173,26 +170,26 @@ def test_track_for_id(self):
spotify.SpotifyPlugin.search_url,
body=response_body,
status=200,
- content_type='application/json',
+ content_type="application/json",
)
- track_info = self.spotify.track_for_id('6NPVjNh8Jhru9xOmyQigds')
+ track_info = self.spotify.track_for_id("6NPVjNh8Jhru9xOmyQigds")
item = Item(
mb_trackid=track_info.track_id,
albumartist=track_info.artist,
title=track_info.title,
- length=track_info.length
+ length=track_info.length,
)
item.add(self.lib)
results = self.spotify._match_library_tracks(self.lib, "Happy")
self.assertEqual(1, len(results))
- self.assertEqual("6NPVjNh8Jhru9xOmyQigds", results[0]['id'])
+ self.assertEqual("6NPVjNh8Jhru9xOmyQigds", results[0]["id"])
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/plugins/test_subsonicupdate.py b/test/plugins/test_subsonicupdate.py
index a516e34871..3d401102f0 100644
--- a/test/plugins/test_subsonicupdate.py
+++ b/test/plugins/test_subsonicupdate.py
@@ -1,15 +1,16 @@
"""Tests for the 'subsonic' plugin."""
-import responses
import unittest
-
from test import _common
-from beets import config
-from beetsplug import subsonicupdate
from test.helper import TestHelper
from urllib.parse import parse_qs, urlparse
+import responses
+
+from beets import config
+from beetsplug import subsonicupdate
+
class ArgumentsMock:
"""Argument mocks for tests."""
@@ -28,6 +29,7 @@ def _params(url):
class SubsonicPluginTest(_common.TestCase, TestHelper):
"""Test class for subsonicupdate."""
+
@responses.activate
def setUp(self):
"""Sets up config and plugin for test."""
@@ -39,20 +41,21 @@ def setUp(self):
config["subsonic"]["url"] = "http://localhost:4040"
responses.add(
responses.GET,
- 'http://localhost:4040/rest/ping.view',
+ "http://localhost:4040/rest/ping.view",
status=200,
- body=self.PING_BODY
+ body=self.PING_BODY,
)
self.subsonicupdate = subsonicupdate.SubsonicUpdate()
- PING_BODY = '''
+
+ PING_BODY = """
{
"subsonic-response": {
"status": "failed",
"version": "1.15.0"
}
}
-'''
- SUCCESS_BODY = '''
+"""
+ SUCCESS_BODY = """
{
"subsonic-response": {
"status": "ok",
@@ -63,9 +66,9 @@ def setUp(self):
}
}
}
-'''
+"""
- FAILED_BODY = '''
+ FAILED_BODY = """
{
"subsonic-response": {
"status": "failed",
@@ -76,9 +79,9 @@ def setUp(self):
}
}
}
-'''
+"""
- ERROR_BODY = '''
+ ERROR_BODY = """
{
"timestamp": 1599185854498,
"status": 404,
@@ -86,7 +89,7 @@ def setUp(self):
"message": "No message available",
"path": "/rest/startScn"
}
-'''
+"""
def tearDown(self):
"""Tears down tests."""
@@ -97,9 +100,9 @@ def test_start_scan(self):
"""Tests success path based on best case scenario."""
responses.add(
responses.GET,
- 'http://localhost:4040/rest/startScan',
+ "http://localhost:4040/rest/startScan",
status=200,
- body=self.SUCCESS_BODY
+ body=self.SUCCESS_BODY,
)
self.subsonicupdate.start_scan()
@@ -109,9 +112,9 @@ def test_start_scan_failed_bad_credentials(self):
"""Tests failed path based on bad credentials."""
responses.add(
responses.GET,
- 'http://localhost:4040/rest/startScan',
+ "http://localhost:4040/rest/startScan",
status=200,
- body=self.FAILED_BODY
+ body=self.FAILED_BODY,
)
self.subsonicupdate.start_scan()
@@ -121,9 +124,9 @@ def test_start_scan_failed_not_found(self):
"""Tests failed path based on resource not found."""
responses.add(
responses.GET,
- 'http://localhost:4040/rest/startScan',
+ "http://localhost:4040/rest/startScan",
status=404,
- body=self.ERROR_BODY
+ body=self.ERROR_BODY,
)
self.subsonicupdate.start_scan()
@@ -139,9 +142,9 @@ def test_url_with_context_path(self):
responses.add(
responses.GET,
- 'http://localhost:4040/contextPath/rest/startScan',
+ "http://localhost:4040/contextPath/rest/startScan",
status=200,
- body=self.SUCCESS_BODY
+ body=self.SUCCESS_BODY,
)
self.subsonicupdate.start_scan()
@@ -153,9 +156,9 @@ def test_url_with_trailing_forward_slash_url(self):
responses.add(
responses.GET,
- 'http://localhost:4040/rest/startScan',
+ "http://localhost:4040/rest/startScan",
status=200,
- body=self.SUCCESS_BODY
+ body=self.SUCCESS_BODY,
)
self.subsonicupdate.start_scan()
@@ -167,9 +170,9 @@ def test_url_with_missing_port(self):
responses.add(
responses.GET,
- 'http://localhost/airsonic/rest/startScan',
+ "http://localhost/airsonic/rest/startScan",
status=200,
- body=self.SUCCESS_BODY
+ body=self.SUCCESS_BODY,
)
self.subsonicupdate.start_scan()
@@ -181,9 +184,9 @@ def test_url_with_missing_schema(self):
responses.add(
responses.GET,
- 'http://localhost:4040/rest/startScan',
+ "http://localhost:4040/rest/startScan",
status=200,
- body=self.SUCCESS_BODY
+ body=self.SUCCESS_BODY,
)
self.subsonicupdate.start_scan()
@@ -194,5 +197,5 @@ def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/plugins/test_the.py b/test/plugins/test_the.py
index 28feea373d..a42f1a8c47 100644
--- a/test/plugins/test_the.py
+++ b/test/plugins/test_the.py
@@ -3,65 +3,70 @@
import unittest
from test import _common
+
from beets import config
-from beetsplug.the import ThePlugin, PATTERN_A, PATTERN_THE, FORMAT
+from beetsplug.the import FORMAT, PATTERN_A, PATTERN_THE, ThePlugin
class ThePluginTest(_common.TestCase):
-
def test_unthe_with_default_patterns(self):
- self.assertEqual(ThePlugin().unthe('', PATTERN_THE), '')
- self.assertEqual(ThePlugin().unthe('The Something', PATTERN_THE),
- 'Something, The')
- self.assertEqual(ThePlugin().unthe('The The', PATTERN_THE),
- 'The, The')
- self.assertEqual(ThePlugin().unthe('The The', PATTERN_THE),
- 'The, The')
- self.assertEqual(ThePlugin().unthe('The The X', PATTERN_THE),
- 'The X, The')
- self.assertEqual(ThePlugin().unthe('the The', PATTERN_THE),
- 'The, the')
- self.assertEqual(ThePlugin().unthe('Protected The', PATTERN_THE),
- 'Protected The')
- self.assertEqual(ThePlugin().unthe('A Boy', PATTERN_A),
- 'Boy, A')
- self.assertEqual(ThePlugin().unthe('a girl', PATTERN_A),
- 'girl, a')
- self.assertEqual(ThePlugin().unthe('An Apple', PATTERN_A),
- 'Apple, An')
- self.assertEqual(ThePlugin().unthe('An A Thing', PATTERN_A),
- 'A Thing, An')
- self.assertEqual(ThePlugin().unthe('the An Arse', PATTERN_A),
- 'the An Arse')
- self.assertEqual(ThePlugin().unthe('TET - Travailleur', PATTERN_THE),
- 'TET - Travailleur')
+ self.assertEqual(ThePlugin().unthe("", PATTERN_THE), "")
+ self.assertEqual(
+ ThePlugin().unthe("The Something", PATTERN_THE), "Something, The"
+ )
+ self.assertEqual(ThePlugin().unthe("The The", PATTERN_THE), "The, The")
+ self.assertEqual(
+ ThePlugin().unthe("The The", PATTERN_THE), "The, The"
+ )
+ self.assertEqual(
+ ThePlugin().unthe("The The X", PATTERN_THE), "The X, The"
+ )
+ self.assertEqual(ThePlugin().unthe("the The", PATTERN_THE), "The, the")
+ self.assertEqual(
+ ThePlugin().unthe("Protected The", PATTERN_THE), "Protected The"
+ )
+ self.assertEqual(ThePlugin().unthe("A Boy", PATTERN_A), "Boy, A")
+ self.assertEqual(ThePlugin().unthe("a girl", PATTERN_A), "girl, a")
+ self.assertEqual(ThePlugin().unthe("An Apple", PATTERN_A), "Apple, An")
+ self.assertEqual(
+ ThePlugin().unthe("An A Thing", PATTERN_A), "A Thing, An"
+ )
+ self.assertEqual(
+ ThePlugin().unthe("the An Arse", PATTERN_A), "the An Arse"
+ )
+ self.assertEqual(
+ ThePlugin().unthe("TET - Travailleur", PATTERN_THE),
+ "TET - Travailleur",
+ )
def test_unthe_with_strip(self):
- config['the']['strip'] = True
- self.assertEqual(ThePlugin().unthe('The Something', PATTERN_THE),
- 'Something')
- self.assertEqual(ThePlugin().unthe('An A', PATTERN_A), 'A')
+ config["the"]["strip"] = True
+ self.assertEqual(
+ ThePlugin().unthe("The Something", PATTERN_THE), "Something"
+ )
+ self.assertEqual(ThePlugin().unthe("An A", PATTERN_A), "A")
def test_template_function_with_defaults(self):
ThePlugin().patterns = [PATTERN_THE, PATTERN_A]
- self.assertEqual(ThePlugin().the_template_func('The The'),
- 'The, The')
- self.assertEqual(ThePlugin().the_template_func('An A'), 'A, An')
+ self.assertEqual(ThePlugin().the_template_func("The The"), "The, The")
+ self.assertEqual(ThePlugin().the_template_func("An A"), "A, An")
def test_custom_pattern(self):
- config['the']['patterns'] = ['^test\\s']
- config['the']['format'] = FORMAT
- self.assertEqual(ThePlugin().the_template_func('test passed'),
- 'passed, test')
+ config["the"]["patterns"] = ["^test\\s"]
+ config["the"]["format"] = FORMAT
+ self.assertEqual(
+ ThePlugin().the_template_func("test passed"), "passed, test"
+ )
def test_custom_format(self):
- config['the']['patterns'] = [PATTERN_THE, PATTERN_A]
- config['the']['format'] = '{1} ({0})'
- self.assertEqual(ThePlugin().the_template_func('The A'), 'The (A)')
+ config["the"]["patterns"] = [PATTERN_THE, PATTERN_A]
+ config["the"]["format"] = "{1} ({0})"
+ self.assertEqual(ThePlugin().the_template_func("The A"), "The (A)")
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/plugins/test_thumbnails.py b/test/plugins/test_thumbnails.py
index f9920f6734..127cc67d82 100644
--- a/test/plugins/test_thumbnails.py
+++ b/test/plugins/test_thumbnails.py
@@ -14,16 +14,20 @@
import os.path
-from unittest.mock import Mock, patch, call
-from tempfile import mkdtemp
-from shutil import rmtree
import unittest
-
+from shutil import rmtree
+from tempfile import mkdtemp
from test.helper import TestHelper
+from unittest.mock import Mock, call, patch
from beets.util import bytestring_path, syspath
-from beetsplug.thumbnails import (ThumbnailsPlugin, NORMAL_DIR, LARGE_DIR,
- PathlibURI, GioURI)
+from beetsplug.thumbnails import (
+ LARGE_DIR,
+ NORMAL_DIR,
+ GioURI,
+ PathlibURI,
+ ThumbnailsPlugin,
+)
class ThumbnailsTest(unittest.TestCase, TestHelper):
@@ -33,29 +37,29 @@ def setUp(self):
def tearDown(self):
self.teardown_beets()
- @patch('beetsplug.thumbnails.ArtResizer')
- @patch('beetsplug.thumbnails.ThumbnailsPlugin._check_local_ok')
- @patch('beetsplug.thumbnails.os.stat')
+ @patch("beetsplug.thumbnails.ArtResizer")
+ @patch("beetsplug.thumbnails.ThumbnailsPlugin._check_local_ok")
+ @patch("beetsplug.thumbnails.os.stat")
def test_add_tags(self, mock_stat, _, mock_artresizer):
plugin = ThumbnailsPlugin()
- plugin.get_uri = Mock(side_effect={b"/path/to/cover":
- "COVER_URI"}.__getitem__)
+ plugin.get_uri = Mock(
+ side_effect={b"/path/to/cover": "COVER_URI"}.__getitem__
+ )
album = Mock(artpath=b"/path/to/cover")
mock_stat.return_value.st_mtime = 12345
plugin.add_tags(album, b"/path/to/thumbnail")
- metadata = {"Thumb::URI": "COVER_URI",
- "Thumb::MTime": "12345"}
+ metadata = {"Thumb::URI": "COVER_URI", "Thumb::MTime": "12345"}
mock_artresizer.shared.write_metadata.assert_called_once_with(
b"/path/to/thumbnail",
metadata,
)
mock_stat.assert_called_once_with(syspath(album.artpath))
- @patch('beetsplug.thumbnails.os')
- @patch('beetsplug.thumbnails.ArtResizer')
- @patch('beetsplug.thumbnails.GioURI')
+ @patch("beetsplug.thumbnails.os")
+ @patch("beetsplug.thumbnails.ArtResizer")
+ @patch("beetsplug.thumbnails.GioURI")
def test_check_local_ok(self, mock_giouri, mock_artresizer, mock_os):
# test local resizing capability
mock_artresizer.shared.local = False
@@ -73,6 +77,7 @@ def exists(path):
if path == syspath(LARGE_DIR):
return True
raise ValueError(f"unexpected path {path!r}")
+
mock_os.path.exists = exists
plugin = ThumbnailsPlugin()
mock_os.makedirs.assert_called_once_with(syspath(NORMAL_DIR))
@@ -96,20 +101,22 @@ def exists(path):
self.assertEqual(ThumbnailsPlugin().get_uri, giouri_inst.uri)
giouri_inst.available = False
- self.assertEqual(ThumbnailsPlugin().get_uri.__self__.__class__,
- PathlibURI)
-
- @patch('beetsplug.thumbnails.ThumbnailsPlugin._check_local_ok')
- @patch('beetsplug.thumbnails.ArtResizer')
- @patch('beetsplug.thumbnails.util')
- @patch('beetsplug.thumbnails.os')
- @patch('beetsplug.thumbnails.shutil')
- def test_make_cover_thumbnail(self, mock_shutils, mock_os, mock_util,
- mock_artresizer, _):
+ self.assertEqual(
+ ThumbnailsPlugin().get_uri.__self__.__class__, PathlibURI
+ )
+
+ @patch("beetsplug.thumbnails.ThumbnailsPlugin._check_local_ok")
+ @patch("beetsplug.thumbnails.ArtResizer")
+ @patch("beetsplug.thumbnails.util")
+ @patch("beetsplug.thumbnails.os")
+ @patch("beetsplug.thumbnails.shutil")
+ def test_make_cover_thumbnail(
+ self, mock_shutils, mock_os, mock_util, mock_artresizer, _
+ ):
thumbnail_dir = os.path.normpath(b"/thumbnail/dir")
md5_file = os.path.join(thumbnail_dir, b"md5")
path_to_art = os.path.normpath(b"/path/to/art")
- path_to_resized_art = os.path.normpath(b'/path/to/resized/artwork')
+ path_to_resized_art = os.path.normpath(b"/path/to/resized/artwork")
mock_os.path.join = os.path.join # don't mock that function
plugin = ThumbnailsPlugin()
@@ -117,7 +124,7 @@ def test_make_cover_thumbnail(self, mock_shutils, mock_os, mock_util,
album = Mock(artpath=path_to_art)
mock_util.syspath.side_effect = lambda x: x
- plugin.thumbnail_file_name = Mock(return_value=b'md5')
+ plugin.thumbnail_file_name = Mock(return_value=b"md5")
mock_os.path.exists.return_value = False
def os_stat(target):
@@ -127,6 +134,7 @@ def os_stat(target):
return Mock(st_mtime=2)
else:
raise ValueError(f"invalid target {target}")
+
mock_os.stat.side_effect = os_stat
mock_resize = mock_artresizer.shared.resize
@@ -138,8 +146,9 @@ def os_stat(target):
mock_resize.assert_called_once_with(12345, path_to_art, md5_file)
plugin.add_tags.assert_called_once_with(album, path_to_resized_art)
- mock_shutils.move.assert_called_once_with(syspath(path_to_resized_art),
- syspath(md5_file))
+ mock_shutils.move.assert_called_once_with(
+ syspath(path_to_resized_art), syspath(md5_file)
+ )
# now test with recent thumbnail & with force
mock_os.path.exists.return_value = True
@@ -153,27 +162,26 @@ def os_stat(target):
return Mock(st_mtime=2)
else:
raise ValueError(f"invalid target {target}")
+
mock_os.stat.side_effect = os_stat
plugin.make_cover_thumbnail(album, 12345, thumbnail_dir)
self.assertEqual(mock_resize.call_count, 0)
# and with force
- plugin.config['force'] = True
+ plugin.config["force"] = True
plugin.make_cover_thumbnail(album, 12345, thumbnail_dir)
mock_resize.assert_called_once_with(12345, path_to_art, md5_file)
- @patch('beetsplug.thumbnails.ThumbnailsPlugin._check_local_ok')
+ @patch("beetsplug.thumbnails.ThumbnailsPlugin._check_local_ok")
def test_make_dolphin_cover_thumbnail(self, _):
plugin = ThumbnailsPlugin()
tmp = bytestring_path(mkdtemp())
- album = Mock(path=tmp,
- artpath=os.path.join(tmp, b"cover.jpg"))
+ album = Mock(path=tmp, artpath=os.path.join(tmp, b"cover.jpg"))
plugin.make_dolphin_cover_thumbnail(album)
with open(os.path.join(tmp, b".directory"), "rb") as f:
self.assertEqual(
- f.read().splitlines(),
- [b"[Desktop Entry]", b"Icon=./cover.jpg"]
+ f.read().splitlines(), [b"[Desktop Entry]", b"Icon=./cover.jpg"]
)
# not rewritten when it already exists (yup that's a big limitation)
@@ -181,14 +189,13 @@ def test_make_dolphin_cover_thumbnail(self, _):
plugin.make_dolphin_cover_thumbnail(album)
with open(os.path.join(tmp, b".directory"), "rb") as f:
self.assertEqual(
- f.read().splitlines(),
- [b"[Desktop Entry]", b"Icon=./cover.jpg"]
+ f.read().splitlines(), [b"[Desktop Entry]", b"Icon=./cover.jpg"]
)
rmtree(syspath(tmp))
- @patch('beetsplug.thumbnails.ThumbnailsPlugin._check_local_ok')
- @patch('beetsplug.thumbnails.ArtResizer')
+ @patch("beetsplug.thumbnails.ThumbnailsPlugin._check_local_ok")
+ @patch("beetsplug.thumbnails.ArtResizer")
def test_process_album(self, mock_artresizer, _):
get_size = mock_artresizer.shared.get_size
@@ -210,11 +217,11 @@ def test_process_album(self, mock_artresizer, _):
self.assertEqual(make_cover.call_count, 0)
# dolphin tests
- plugin.config['dolphin'] = False
+ plugin.config["dolphin"] = False
plugin.process_album(album)
self.assertEqual(make_dolphin.call_count, 0)
- plugin.config['dolphin'] = True
+ plugin.config["dolphin"] = True
plugin.process_album(album)
make_dolphin.assert_called_once_with(album)
@@ -227,12 +234,13 @@ def test_process_album(self, mock_artresizer, _):
make_cover.reset_mock()
get_size.return_value = 500, 500
plugin.process_album(album)
- make_cover.assert_has_calls([call(album, 128, NORMAL_DIR),
- call(album, 256, LARGE_DIR)],
- any_order=True)
+ make_cover.assert_has_calls(
+ [call(album, 128, NORMAL_DIR), call(album, 256, LARGE_DIR)],
+ any_order=True,
+ )
- @patch('beetsplug.thumbnails.ThumbnailsPlugin._check_local_ok')
- @patch('beetsplug.thumbnails.decargs')
+ @patch("beetsplug.thumbnails.ThumbnailsPlugin._check_local_ok")
+ @patch("beetsplug.thumbnails.decargs")
def test_invokations(self, mock_decargs, _):
plugin = ThumbnailsPlugin()
plugin.process_album = Mock()
@@ -244,15 +252,18 @@ def test_invokations(self, mock_decargs, _):
lib.albums.return_value = [album, album2]
plugin.process_query(lib, Mock(), None)
lib.albums.assert_called_once_with(mock_decargs.return_value)
- plugin.process_album.assert_has_calls([call(album), call(album2)],
- any_order=True)
+ plugin.process_album.assert_has_calls(
+ [call(album), call(album2)], any_order=True
+ )
- @patch('beetsplug.thumbnails.BaseDirectory')
+ @patch("beetsplug.thumbnails.BaseDirectory")
def test_thumbnail_file_name(self, mock_basedir):
plug = ThumbnailsPlugin()
plug.get_uri = Mock(return_value="file:///my/uri")
- self.assertEqual(plug.thumbnail_file_name(b'idontcare'),
- b"9488f5797fbe12ffb316d607dfd93d04.png")
+ self.assertEqual(
+ plug.thumbnail_file_name(b"idontcare"),
+ b"9488f5797fbe12ffb316d607dfd93d04.png",
+ )
def test_uri(self):
gio = GioURI()
@@ -263,21 +274,24 @@ def test_uri(self):
self.assertEqual(gio.uri(b"/foo"), "file:///foo")
self.assertEqual(gio.uri(b"/foo!"), "file:///foo!")
self.assertEqual(
- gio.uri(b'/music/\xec\x8b\xb8\xec\x9d\xb4'),
- 'file:///music/%EC%8B%B8%EC%9D%B4')
+ gio.uri(b"/music/\xec\x8b\xb8\xec\x9d\xb4"),
+ "file:///music/%EC%8B%B8%EC%9D%B4",
+ )
-class TestPathlibURI():
+class TestPathlibURI:
"""Test PathlibURI class"""
+
def test_uri(self):
test_uri = PathlibURI()
# test it won't break if we pass it bytes for a path
- test_uri.uri(b'/')
+ test_uri.uri(b"/")
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/plugins/test_types_plugin.py b/test/plugins/test_types_plugin.py
index 5a3bdcc49b..6488ca0ab8 100644
--- a/test/plugins/test_types_plugin.py
+++ b/test/plugins/test_types_plugin.py
@@ -14,97 +14,95 @@
import time
-from datetime import datetime
import unittest
-
+from datetime import datetime
from test.helper import TestHelper
from confuse import ConfigValueError
class TypesPluginTest(unittest.TestCase, TestHelper):
-
def setUp(self):
self.setup_beets()
- self.load_plugins('types')
+ self.load_plugins("types")
def tearDown(self):
self.unload_plugins()
self.teardown_beets()
def test_integer_modify_and_query(self):
- self.config['types'] = {'myint': 'int'}
- item = self.add_item(artist='aaa')
+ self.config["types"] = {"myint": "int"}
+ item = self.add_item(artist="aaa")
# Do not match unset values
- out = self.list('myint:1..3')
- self.assertEqual('', out)
+ out = self.list("myint:1..3")
+ self.assertEqual("", out)
- self.modify('myint=2')
+ self.modify("myint=2")
item.load()
- self.assertEqual(item['myint'], 2)
+ self.assertEqual(item["myint"], 2)
# Match in range
- out = self.list('myint:1..3')
- self.assertIn('aaa', out)
+ out = self.list("myint:1..3")
+ self.assertIn("aaa", out)
def test_album_integer_modify_and_query(self):
- self.config['types'] = {'myint': 'int'}
- album = self.add_album(albumartist='aaa')
+ self.config["types"] = {"myint": "int"}
+ album = self.add_album(albumartist="aaa")
# Do not match unset values
- out = self.list_album('myint:1..3')
- self.assertEqual('', out)
+ out = self.list_album("myint:1..3")
+ self.assertEqual("", out)
- self.modify('-a', 'myint=2')
+ self.modify("-a", "myint=2")
album.load()
- self.assertEqual(album['myint'], 2)
+ self.assertEqual(album["myint"], 2)
# Match in range
- out = self.list_album('myint:1..3')
- self.assertIn('aaa', out)
+ out = self.list_album("myint:1..3")
+ self.assertIn("aaa", out)
def test_float_modify_and_query(self):
- self.config['types'] = {'myfloat': 'float'}
- item = self.add_item(artist='aaa')
+ self.config["types"] = {"myfloat": "float"}
+ item = self.add_item(artist="aaa")
# Do not match unset values
- out = self.list('myfloat:10..0')
- self.assertEqual('', out)
+ out = self.list("myfloat:10..0")
+ self.assertEqual("", out)
- self.modify('myfloat=-9.1')
+ self.modify("myfloat=-9.1")
item.load()
- self.assertEqual(item['myfloat'], -9.1)
+ self.assertEqual(item["myfloat"], -9.1)
# Match in range
- out = self.list('myfloat:-10..0')
- self.assertIn('aaa', out)
+ out = self.list("myfloat:-10..0")
+ self.assertIn("aaa", out)
def test_bool_modify_and_query(self):
- self.config['types'] = {'mybool': 'bool'}
- true = self.add_item(artist='true')
- false = self.add_item(artist='false')
- self.add_item(artist='unset')
+ self.config["types"] = {"mybool": "bool"}
+ true = self.add_item(artist="true")
+ false = self.add_item(artist="false")
+ self.add_item(artist="unset")
# Do not match unset values
- out = self.list('mybool:true, mybool:false')
- self.assertEqual('', out)
+ out = self.list("mybool:true, mybool:false")
+ self.assertEqual("", out)
# Set true
- self.modify('mybool=1', 'artist:true')
+ self.modify("mybool=1", "artist:true")
true.load()
- self.assertEqual(true['mybool'], True)
+ self.assertEqual(true["mybool"], True)
# Set false
- self.modify('mybool=false', 'artist:false')
+ self.modify("mybool=false", "artist:false")
false.load()
- self.assertEqual(false['mybool'], False)
+ self.assertEqual(false["mybool"], False)
# Query bools
- out = self.list('mybool:true', '$artist $mybool')
- self.assertEqual('true True', out)
+ out = self.list("mybool:true", "$artist $mybool")
+ self.assertEqual("true True", out)
- out = self.list('mybool:false', '$artist $mybool')
+ out = self.list("mybool:false", "$artist $mybool")
# Dealing with unset fields?
# self.assertEqual('false False', out)
@@ -112,79 +110,90 @@ def test_bool_modify_and_query(self):
# self.assertIn('unset $mybool', out)
def test_date_modify_and_query(self):
- self.config['types'] = {'mydate': 'date'}
+ self.config["types"] = {"mydate": "date"}
# FIXME parsing should also work with default time format
- self.config['time_format'] = '%Y-%m-%d'
- old = self.add_item(artist='prince')
- new = self.add_item(artist='britney')
+ self.config["time_format"] = "%Y-%m-%d"
+ old = self.add_item(artist="prince")
+ new = self.add_item(artist="britney")
# Do not match unset values
- out = self.list('mydate:..2000')
- self.assertEqual('', out)
+ out = self.list("mydate:..2000")
+ self.assertEqual("", out)
- self.modify('mydate=1999-01-01', 'artist:prince')
+ self.modify("mydate=1999-01-01", "artist:prince")
old.load()
- self.assertEqual(old['mydate'], mktime(1999, 1, 1))
+ self.assertEqual(old["mydate"], mktime(1999, 1, 1))
- self.modify('mydate=1999-12-30', 'artist:britney')
+ self.modify("mydate=1999-12-30", "artist:britney")
new.load()
- self.assertEqual(new['mydate'], mktime(1999, 12, 30))
+ self.assertEqual(new["mydate"], mktime(1999, 12, 30))
# Match in range
- out = self.list('mydate:..1999-07', '$artist $mydate')
- self.assertEqual('prince 1999-01-01', out)
+ out = self.list("mydate:..1999-07", "$artist $mydate")
+ self.assertEqual("prince 1999-01-01", out)
# FIXME some sort of timezone issue here
# out = self.list('mydate:1999-12-30', '$artist $mydate')
# self.assertEqual('britney 1999-12-30', out)
def test_unknown_type_error(self):
- self.config['types'] = {'flex': 'unkown type'}
+ self.config["types"] = {"flex": "unkown type"}
with self.assertRaises(ConfigValueError):
- self.run_command('ls')
+ self.run_command("ls")
def test_template_if_def(self):
# Tests for a subtle bug when using %ifdef in templates along with
# types that have truthy default values (e.g. '0', '0.0', 'False')
# https://github.com/beetbox/beets/issues/3852
- self.config['types'] = {'playcount': 'int', 'rating': 'float',
- 'starred': 'bool'}
-
- with_fields = self.add_item(artist='prince')
- self.modify('playcount=10', 'artist=prince')
- self.modify('rating=5.0', 'artist=prince')
- self.modify('starred=yes', 'artist=prince')
+ self.config["types"] = {
+ "playcount": "int",
+ "rating": "float",
+ "starred": "bool",
+ }
+
+ with_fields = self.add_item(artist="prince")
+ self.modify("playcount=10", "artist=prince")
+ self.modify("rating=5.0", "artist=prince")
+ self.modify("starred=yes", "artist=prince")
with_fields.load()
- without_fields = self.add_item(artist='britney')
-
- int_template = '%ifdef{playcount,Play count: $playcount,Not played}'
- self.assertEqual(with_fields.evaluate_template(int_template),
- 'Play count: 10')
- self.assertEqual(without_fields.evaluate_template(int_template),
- 'Not played')
-
- float_template = '%ifdef{rating,Rating: $rating,Not rated}'
- self.assertEqual(with_fields.evaluate_template(float_template),
- 'Rating: 5.0')
- self.assertEqual(without_fields.evaluate_template(float_template),
- 'Not rated')
-
- bool_template = '%ifdef{starred,Starred: $starred,Not starred}'
- self.assertIn(with_fields.evaluate_template(bool_template).lower(),
- ('starred: true', 'starred: yes', 'starred: y'))
- self.assertEqual(without_fields.evaluate_template(bool_template),
- 'Not starred')
+ without_fields = self.add_item(artist="britney")
+
+ int_template = "%ifdef{playcount,Play count: $playcount,Not played}"
+ self.assertEqual(
+ with_fields.evaluate_template(int_template), "Play count: 10"
+ )
+ self.assertEqual(
+ without_fields.evaluate_template(int_template), "Not played"
+ )
+
+ float_template = "%ifdef{rating,Rating: $rating,Not rated}"
+ self.assertEqual(
+ with_fields.evaluate_template(float_template), "Rating: 5.0"
+ )
+ self.assertEqual(
+ without_fields.evaluate_template(float_template), "Not rated"
+ )
+
+ bool_template = "%ifdef{starred,Starred: $starred,Not starred}"
+ self.assertIn(
+ with_fields.evaluate_template(bool_template).lower(),
+ ("starred: true", "starred: yes", "starred: y"),
+ )
+ self.assertEqual(
+ without_fields.evaluate_template(bool_template), "Not starred"
+ )
def modify(self, *args):
- return self.run_with_output('modify', '--yes', '--nowrite',
- '--nomove', *args)
+ return self.run_with_output(
+ "modify", "--yes", "--nowrite", "--nomove", *args
+ )
- def list(self, query, fmt='$artist - $album - $title'):
- return self.run_with_output('ls', '-f', fmt, query).strip()
+ def list(self, query, fmt="$artist - $album - $title"):
+ return self.run_with_output("ls", "-f", fmt, query).strip()
- def list_album(self, query, fmt='$albumartist - $album - $title'):
- return self.run_with_output('ls', '-a', '-f', fmt, query).strip()
+ def list_album(self, query, fmt="$albumartist - $album - $title"):
+ return self.run_with_output("ls", "-a", "-f", fmt, query).strip()
def mktime(*args):
@@ -194,5 +203,6 @@ def mktime(*args):
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/plugins/test_web.py b/test/plugins/test_web.py
index 42611b4b55..8da6f2350d 100644
--- a/test/plugins/test_web.py
+++ b/test/plugins/test_web.py
@@ -2,30 +2,26 @@
import json
-import unittest
import os.path
+import platform
import shutil
-
+import unittest
from test import _common
-from beets.library import Item, Album
-from beetsplug import web
-
-import platform
from beets import logging
+from beets.library import Album, Item
+from beetsplug import web
class WebPluginTest(_common.LibTestCase):
-
def setUp(self):
-
super().setUp()
- self.log = logging.getLogger('beets.web')
+ self.log = logging.getLogger("beets.web")
- if platform.system() == 'Windows':
- self.path_prefix = 'C:'
+ if platform.system() == "Windows":
+ self.path_prefix = "C:"
else:
- self.path_prefix = ''
+ self.path_prefix = ""
# Add fixtures
for track in self.lib.items():
@@ -34,326 +30,324 @@ def setUp(self):
# Add library elements. Note that self.lib.add overrides any "id="
# and assigns the next free id number.
# The following adds will create items #1, #2 and #3
- path1 = self.path_prefix + os.sep + \
- os.path.join(b'path_1').decode('utf-8')
- self.lib.add(Item(title='title',
- path=path1,
- album_id=2,
- artist='AAA Singers'))
- path2 = self.path_prefix + os.sep + \
- os.path.join(b'somewhere', b'a').decode('utf-8')
- self.lib.add(Item(title='another title',
- path=path2,
- artist='AAA Singers'))
- path3 = self.path_prefix + os.sep + \
- os.path.join(b'somewhere', b'abc').decode('utf-8')
- self.lib.add(Item(title='and a third',
- testattr='ABC',
- path=path3,
- album_id=2))
+ path1 = (
+ self.path_prefix + os.sep + os.path.join(b"path_1").decode("utf-8")
+ )
+ self.lib.add(
+ Item(title="title", path=path1, album_id=2, artist="AAA Singers")
+ )
+ path2 = (
+ self.path_prefix
+ + os.sep
+ + os.path.join(b"somewhere", b"a").decode("utf-8")
+ )
+ self.lib.add(
+ Item(title="another title", path=path2, artist="AAA Singers")
+ )
+ path3 = (
+ self.path_prefix
+ + os.sep
+ + os.path.join(b"somewhere", b"abc").decode("utf-8")
+ )
+ self.lib.add(
+ Item(title="and a third", testattr="ABC", path=path3, album_id=2)
+ )
# The following adds will create albums #1 and #2
- self.lib.add(Album(album='album',
- albumtest='xyz'))
- path4 = self.path_prefix + os.sep + \
- os.path.join(b'somewhere2', b'art_path_2').decode('utf-8')
- self.lib.add(Album(album='other album',
- artpath=path4))
-
- web.app.config['TESTING'] = True
- web.app.config['lib'] = self.lib
- web.app.config['INCLUDE_PATHS'] = False
- web.app.config['READONLY'] = True
+ self.lib.add(Album(album="album", albumtest="xyz"))
+ path4 = (
+ self.path_prefix
+ + os.sep
+ + os.path.join(b"somewhere2", b"art_path_2").decode("utf-8")
+ )
+ self.lib.add(Album(album="other album", artpath=path4))
+
+ web.app.config["TESTING"] = True
+ web.app.config["lib"] = self.lib
+ web.app.config["INCLUDE_PATHS"] = False
+ web.app.config["READONLY"] = True
self.client = web.app.test_client()
def test_config_include_paths_true(self):
- web.app.config['INCLUDE_PATHS'] = True
- response = self.client.get('/item/1')
- res_json = json.loads(response.data.decode('utf-8'))
- expected_path = self.path_prefix + os.sep \
- + os.path.join(b'path_1').decode('utf-8')
+ web.app.config["INCLUDE_PATHS"] = True
+ response = self.client.get("/item/1")
+ res_json = json.loads(response.data.decode("utf-8"))
+ expected_path = (
+ self.path_prefix + os.sep + os.path.join(b"path_1").decode("utf-8")
+ )
self.assertEqual(response.status_code, 200)
- self.assertEqual(res_json['path'], expected_path)
+ self.assertEqual(res_json["path"], expected_path)
- web.app.config['INCLUDE_PATHS'] = False
+ web.app.config["INCLUDE_PATHS"] = False
def test_config_include_artpaths_true(self):
- web.app.config['INCLUDE_PATHS'] = True
- response = self.client.get('/album/2')
- res_json = json.loads(response.data.decode('utf-8'))
- expected_path = self.path_prefix + os.sep \
- + os.path.join(b'somewhere2', b'art_path_2').decode('utf-8')
+ web.app.config["INCLUDE_PATHS"] = True
+ response = self.client.get("/album/2")
+ res_json = json.loads(response.data.decode("utf-8"))
+ expected_path = (
+ self.path_prefix
+ + os.sep
+ + os.path.join(b"somewhere2", b"art_path_2").decode("utf-8")
+ )
self.assertEqual(response.status_code, 200)
- self.assertEqual(res_json['artpath'], expected_path)
+ self.assertEqual(res_json["artpath"], expected_path)
- web.app.config['INCLUDE_PATHS'] = False
+ web.app.config["INCLUDE_PATHS"] = False
def test_config_include_paths_false(self):
- web.app.config['INCLUDE_PATHS'] = False
- response = self.client.get('/item/1')
- res_json = json.loads(response.data.decode('utf-8'))
+ web.app.config["INCLUDE_PATHS"] = False
+ response = self.client.get("/item/1")
+ res_json = json.loads(response.data.decode("utf-8"))
self.assertEqual(response.status_code, 200)
- self.assertNotIn('path', res_json)
+ self.assertNotIn("path", res_json)
def test_config_include_artpaths_false(self):
- web.app.config['INCLUDE_PATHS'] = False
- response = self.client.get('/album/2')
- res_json = json.loads(response.data.decode('utf-8'))
+ web.app.config["INCLUDE_PATHS"] = False
+ response = self.client.get("/album/2")
+ res_json = json.loads(response.data.decode("utf-8"))
self.assertEqual(response.status_code, 200)
- self.assertNotIn('artpath', res_json)
+ self.assertNotIn("artpath", res_json)
def test_get_all_items(self):
- response = self.client.get('/item/')
- res_json = json.loads(response.data.decode('utf-8'))
+ response = self.client.get("/item/")
+ res_json = json.loads(response.data.decode("utf-8"))
self.assertEqual(response.status_code, 200)
- self.assertEqual(len(res_json['items']), 3)
+ self.assertEqual(len(res_json["items"]), 3)
def test_get_single_item_by_id(self):
- response = self.client.get('/item/1')
- res_json = json.loads(response.data.decode('utf-8'))
+ response = self.client.get("/item/1")
+ res_json = json.loads(response.data.decode("utf-8"))
self.assertEqual(response.status_code, 200)
- self.assertEqual(res_json['id'], 1)
- self.assertEqual(res_json['title'], 'title')
+ self.assertEqual(res_json["id"], 1)
+ self.assertEqual(res_json["title"], "title")
def test_get_multiple_items_by_id(self):
- response = self.client.get('/item/1,2')
- res_json = json.loads(response.data.decode('utf-8'))
+ response = self.client.get("/item/1,2")
+ res_json = json.loads(response.data.decode("utf-8"))
self.assertEqual(response.status_code, 200)
- self.assertEqual(len(res_json['items']), 2)
- response_titles = {item['title'] for item in res_json['items']}
- self.assertEqual(response_titles, {'title', 'another title'})
+ self.assertEqual(len(res_json["items"]), 2)
+ response_titles = {item["title"] for item in res_json["items"]}
+ self.assertEqual(response_titles, {"title", "another title"})
def test_get_single_item_not_found(self):
- response = self.client.get('/item/4')
+ response = self.client.get("/item/4")
self.assertEqual(response.status_code, 404)
def test_get_single_item_by_path(self):
- data_path = os.path.join(_common.RSRC, b'full.mp3')
+ data_path = os.path.join(_common.RSRC, b"full.mp3")
self.lib.add(Item.from_path(data_path))
- response = self.client.get('/item/path/' + data_path.decode('utf-8'))
- res_json = json.loads(response.data.decode('utf-8'))
+ response = self.client.get("/item/path/" + data_path.decode("utf-8"))
+ res_json = json.loads(response.data.decode("utf-8"))
self.assertEqual(response.status_code, 200)
- self.assertEqual(res_json['title'], 'full')
+ self.assertEqual(res_json["title"], "full")
def test_get_single_item_by_path_not_found_if_not_in_library(self):
- data_path = os.path.join(_common.RSRC, b'full.mp3')
+ data_path = os.path.join(_common.RSRC, b"full.mp3")
# data_path points to a valid file, but we have not added the file
# to the library.
- response = self.client.get('/item/path/' + data_path.decode('utf-8'))
+ response = self.client.get("/item/path/" + data_path.decode("utf-8"))
self.assertEqual(response.status_code, 404)
def test_get_item_empty_query(self):
- """ testing item query: """
- response = self.client.get('/item/query/')
- res_json = json.loads(response.data.decode('utf-8'))
+ """testing item query: """
+ response = self.client.get("/item/query/")
+ res_json = json.loads(response.data.decode("utf-8"))
self.assertEqual(response.status_code, 200)
- self.assertEqual(len(res_json['items']), 3)
+ self.assertEqual(len(res_json["items"]), 3)
def test_get_simple_item_query(self):
- """ testing item query: another """
- response = self.client.get('/item/query/another')
- res_json = json.loads(response.data.decode('utf-8'))
+ """testing item query: another"""
+ response = self.client.get("/item/query/another")
+ res_json = json.loads(response.data.decode("utf-8"))
self.assertEqual(response.status_code, 200)
- self.assertEqual(len(res_json['results']), 1)
- self.assertEqual(res_json['results'][0]['title'],
- 'another title')
+ self.assertEqual(len(res_json["results"]), 1)
+ self.assertEqual(res_json["results"][0]["title"], "another title")
def test_query_item_string(self):
- """ testing item query: testattr:ABC """
- response = self.client.get('/item/query/testattr%3aABC')
- res_json = json.loads(response.data.decode('utf-8'))
+ """testing item query: testattr:ABC"""
+ response = self.client.get("/item/query/testattr%3aABC")
+ res_json = json.loads(response.data.decode("utf-8"))
self.assertEqual(response.status_code, 200)
- self.assertEqual(len(res_json['results']), 1)
- self.assertEqual(res_json['results'][0]['title'],
- 'and a third')
+ self.assertEqual(len(res_json["results"]), 1)
+ self.assertEqual(res_json["results"][0]["title"], "and a third")
def test_query_item_regex(self):
- """ testing item query: testattr::[A-C]+ """
- response = self.client.get('/item/query/testattr%3a%3a[A-C]%2b')
- res_json = json.loads(response.data.decode('utf-8'))
+ """testing item query: testattr::[A-C]+"""
+ response = self.client.get("/item/query/testattr%3a%3a[A-C]%2b")
+ res_json = json.loads(response.data.decode("utf-8"))
self.assertEqual(response.status_code, 200)
- self.assertEqual(len(res_json['results']), 1)
- self.assertEqual(res_json['results'][0]['title'],
- 'and a third')
+ self.assertEqual(len(res_json["results"]), 1)
+ self.assertEqual(res_json["results"][0]["title"], "and a third")
def test_query_item_regex_backslash(self):
# """ testing item query: testattr::\w+ """
- response = self.client.get('/item/query/testattr%3a%3a%5cw%2b')
- res_json = json.loads(response.data.decode('utf-8'))
+ response = self.client.get("/item/query/testattr%3a%3a%5cw%2b")
+ res_json = json.loads(response.data.decode("utf-8"))
self.assertEqual(response.status_code, 200)
- self.assertEqual(len(res_json['results']), 1)
- self.assertEqual(res_json['results'][0]['title'],
- 'and a third')
+ self.assertEqual(len(res_json["results"]), 1)
+ self.assertEqual(res_json["results"][0]["title"], "and a third")
def test_query_item_path(self):
# """ testing item query: path:\somewhere\a """
- """ Note: path queries are special: the query item must match the path
- from the root all the way to a directory, so this matches 1 item """
+ """Note: path queries are special: the query item must match the path
+ from the root all the way to a directory, so this matches 1 item"""
""" Note: filesystem separators in the query must be '\' """
- response = self.client.get('/item/query/path:'
- + self.path_prefix
- + '\\somewhere\\a')
- res_json = json.loads(response.data.decode('utf-8'))
+ response = self.client.get(
+ "/item/query/path:" + self.path_prefix + "\\somewhere\\a"
+ )
+ res_json = json.loads(response.data.decode("utf-8"))
self.assertEqual(response.status_code, 200)
- self.assertEqual(len(res_json['results']), 1)
- self.assertEqual(res_json['results'][0]['title'],
- 'another title')
+ self.assertEqual(len(res_json["results"]), 1)
+ self.assertEqual(res_json["results"][0]["title"], "another title")
def test_get_all_albums(self):
- response = self.client.get('/album/')
- res_json = json.loads(response.data.decode('utf-8'))
+ response = self.client.get("/album/")
+ res_json = json.loads(response.data.decode("utf-8"))
self.assertEqual(response.status_code, 200)
- response_albums = [album['album'] for album in res_json['albums']]
- self.assertCountEqual(response_albums, ['album', 'other album'])
+ response_albums = [album["album"] for album in res_json["albums"]]
+ self.assertCountEqual(response_albums, ["album", "other album"])
def test_get_single_album_by_id(self):
- response = self.client.get('/album/2')
- res_json = json.loads(response.data.decode('utf-8'))
+ response = self.client.get("/album/2")
+ res_json = json.loads(response.data.decode("utf-8"))
self.assertEqual(response.status_code, 200)
- self.assertEqual(res_json['id'], 2)
- self.assertEqual(res_json['album'], 'other album')
+ self.assertEqual(res_json["id"], 2)
+ self.assertEqual(res_json["album"], "other album")
def test_get_multiple_albums_by_id(self):
- response = self.client.get('/album/1,2')
- res_json = json.loads(response.data.decode('utf-8'))
+ response = self.client.get("/album/1,2")
+ res_json = json.loads(response.data.decode("utf-8"))
self.assertEqual(response.status_code, 200)
- response_albums = [album['album'] for album in res_json['albums']]
- self.assertCountEqual(response_albums, ['album', 'other album'])
+ response_albums = [album["album"] for album in res_json["albums"]]
+ self.assertCountEqual(response_albums, ["album", "other album"])
def test_get_album_empty_query(self):
- response = self.client.get('/album/query/')
- res_json = json.loads(response.data.decode('utf-8'))
+ response = self.client.get("/album/query/")
+ res_json = json.loads(response.data.decode("utf-8"))
self.assertEqual(response.status_code, 200)
- self.assertEqual(len(res_json['albums']), 2)
+ self.assertEqual(len(res_json["albums"]), 2)
def test_get_simple_album_query(self):
- response = self.client.get('/album/query/other')
- res_json = json.loads(response.data.decode('utf-8'))
+ response = self.client.get("/album/query/other")
+ res_json = json.loads(response.data.decode("utf-8"))
self.assertEqual(response.status_code, 200)
- self.assertEqual(len(res_json['results']), 1)
- self.assertEqual(res_json['results'][0]['album'],
- 'other album')
- self.assertEqual(res_json['results'][0]['id'], 2)
+ self.assertEqual(len(res_json["results"]), 1)
+ self.assertEqual(res_json["results"][0]["album"], "other album")
+ self.assertEqual(res_json["results"][0]["id"], 2)
def test_get_album_details(self):
- response = self.client.get('/album/2?expand')
- res_json = json.loads(response.data.decode('utf-8'))
+ response = self.client.get("/album/2?expand")
+ res_json = json.loads(response.data.decode("utf-8"))
self.assertEqual(response.status_code, 200)
- self.assertEqual(len(res_json['items']), 2)
- self.assertEqual(res_json['items'][0]['album'],
- 'other album')
- self.assertEqual(res_json['items'][1]['album'],
- 'other album')
- response_track_titles = {item['title'] for item in res_json['items']}
- self.assertEqual(response_track_titles, {'title', 'and a third'})
+ self.assertEqual(len(res_json["items"]), 2)
+ self.assertEqual(res_json["items"][0]["album"], "other album")
+ self.assertEqual(res_json["items"][1]["album"], "other album")
+ response_track_titles = {item["title"] for item in res_json["items"]}
+ self.assertEqual(response_track_titles, {"title", "and a third"})
def test_query_album_string(self):
- """ testing query: albumtest:xy """
- response = self.client.get('/album/query/albumtest%3axy')
- res_json = json.loads(response.data.decode('utf-8'))
+ """testing query: albumtest:xy"""
+ response = self.client.get("/album/query/albumtest%3axy")
+ res_json = json.loads(response.data.decode("utf-8"))
self.assertEqual(response.status_code, 200)
- self.assertEqual(len(res_json['results']), 1)
- self.assertEqual(res_json['results'][0]['album'],
- 'album')
+ self.assertEqual(len(res_json["results"]), 1)
+ self.assertEqual(res_json["results"][0]["album"], "album")
def test_query_album_artpath_regex(self):
- """ testing query: artpath::art_ """
- response = self.client.get('/album/query/artpath%3a%3aart_')
- res_json = json.loads(response.data.decode('utf-8'))
+ """testing query: artpath::art_"""
+ response = self.client.get("/album/query/artpath%3a%3aart_")
+ res_json = json.loads(response.data.decode("utf-8"))
self.assertEqual(response.status_code, 200)
- self.assertEqual(len(res_json['results']), 1)
- self.assertEqual(res_json['results'][0]['album'],
- 'other album')
+ self.assertEqual(len(res_json["results"]), 1)
+ self.assertEqual(res_json["results"][0]["album"], "other album")
def test_query_album_regex_backslash(self):
# """ testing query: albumtest::\w+ """
- response = self.client.get('/album/query/albumtest%3a%3a%5cw%2b')
- res_json = json.loads(response.data.decode('utf-8'))
+ response = self.client.get("/album/query/albumtest%3a%3a%5cw%2b")
+ res_json = json.loads(response.data.decode("utf-8"))
self.assertEqual(response.status_code, 200)
- self.assertEqual(len(res_json['results']), 1)
- self.assertEqual(res_json['results'][0]['album'],
- 'album')
+ self.assertEqual(len(res_json["results"]), 1)
+ self.assertEqual(res_json["results"][0]["album"], "album")
def test_get_stats(self):
- response = self.client.get('/stats')
- res_json = json.loads(response.data.decode('utf-8'))
+ response = self.client.get("/stats")
+ res_json = json.loads(response.data.decode("utf-8"))
self.assertEqual(response.status_code, 200)
- self.assertEqual(res_json['items'], 3)
- self.assertEqual(res_json['albums'], 2)
+ self.assertEqual(res_json["items"], 3)
+ self.assertEqual(res_json["albums"], 2)
def test_delete_item_id(self):
-
- web.app.config['READONLY'] = False
+ web.app.config["READONLY"] = False
# Create a temporary item
- item_id = self.lib.add(Item(title='test_delete_item_id',
- test_delete_item_id=1))
+ item_id = self.lib.add(
+ Item(title="test_delete_item_id", test_delete_item_id=1)
+ )
# Check we can find the temporary item we just created
- response = self.client.get('/item/' + str(item_id))
- res_json = json.loads(response.data.decode('utf-8'))
+ response = self.client.get("/item/" + str(item_id))
+ res_json = json.loads(response.data.decode("utf-8"))
self.assertEqual(response.status_code, 200)
- self.assertEqual(res_json['id'], item_id)
+ self.assertEqual(res_json["id"], item_id)
# Delete item by id
- response = self.client.delete('/item/' + str(item_id))
- res_json = json.loads(response.data.decode('utf-8'))
+ response = self.client.delete("/item/" + str(item_id))
+ res_json = json.loads(response.data.decode("utf-8"))
self.assertEqual(response.status_code, 200)
# Check the item has gone
- response = self.client.get('/item/' + str(item_id))
+ response = self.client.get("/item/" + str(item_id))
self.assertEqual(response.status_code, 404)
# Note: if this fails, the item may still be around
# and may cause other tests to fail
def test_delete_item_without_file(self):
-
- web.app.config['READONLY'] = False
+ web.app.config["READONLY"] = False
# Create an item with a file
- ipath = os.path.join(self.temp_dir, b'testfile1.mp3')
- shutil.copy(os.path.join(_common.RSRC, b'full.mp3'), ipath)
+ ipath = os.path.join(self.temp_dir, b"testfile1.mp3")
+ shutil.copy(os.path.join(_common.RSRC, b"full.mp3"), ipath)
self.assertTrue(os.path.exists(ipath))
item_id = self.lib.add(Item.from_path(ipath))
# Check we can find the temporary item we just created
- response = self.client.get('/item/' + str(item_id))
- res_json = json.loads(response.data.decode('utf-8'))
+ response = self.client.get("/item/" + str(item_id))
+ res_json = json.loads(response.data.decode("utf-8"))
self.assertEqual(response.status_code, 200)
- self.assertEqual(res_json['id'], item_id)
+ self.assertEqual(res_json["id"], item_id)
# Delete item by id, without deleting file
- response = self.client.delete('/item/' + str(item_id))
- res_json = json.loads(response.data.decode('utf-8'))
+ response = self.client.delete("/item/" + str(item_id))
+ res_json = json.loads(response.data.decode("utf-8"))
self.assertEqual(response.status_code, 200)
# Check the item has gone
- response = self.client.get('/item/' + str(item_id))
+ response = self.client.get("/item/" + str(item_id))
self.assertEqual(response.status_code, 404)
# Check the file has not gone
@@ -361,238 +355,237 @@ def test_delete_item_without_file(self):
os.remove(ipath)
def test_delete_item_with_file(self):
-
- web.app.config['READONLY'] = False
+ web.app.config["READONLY"] = False
# Create an item with a file
- ipath = os.path.join(self.temp_dir, b'testfile2.mp3')
- shutil.copy(os.path.join(_common.RSRC, b'full.mp3'), ipath)
+ ipath = os.path.join(self.temp_dir, b"testfile2.mp3")
+ shutil.copy(os.path.join(_common.RSRC, b"full.mp3"), ipath)
self.assertTrue(os.path.exists(ipath))
item_id = self.lib.add(Item.from_path(ipath))
# Check we can find the temporary item we just created
- response = self.client.get('/item/' + str(item_id))
- res_json = json.loads(response.data.decode('utf-8'))
+ response = self.client.get("/item/" + str(item_id))
+ res_json = json.loads(response.data.decode("utf-8"))
self.assertEqual(response.status_code, 200)
- self.assertEqual(res_json['id'], item_id)
+ self.assertEqual(res_json["id"], item_id)
# Delete item by id, with file
- response = self.client.delete('/item/' + str(item_id) + '?delete')
- res_json = json.loads(response.data.decode('utf-8'))
+ response = self.client.delete("/item/" + str(item_id) + "?delete")
+ res_json = json.loads(response.data.decode("utf-8"))
self.assertEqual(response.status_code, 200)
# Check the item has gone
- response = self.client.get('/item/' + str(item_id))
+ response = self.client.get("/item/" + str(item_id))
self.assertEqual(response.status_code, 404)
# Check the file has gone
self.assertFalse(os.path.exists(ipath))
def test_delete_item_query(self):
-
- web.app.config['READONLY'] = False
+ web.app.config["READONLY"] = False
# Create a temporary item
- self.lib.add(Item(title='test_delete_item_query',
- test_delete_item_query=1))
+ self.lib.add(
+ Item(title="test_delete_item_query", test_delete_item_query=1)
+ )
# Check we can find the temporary item we just created
- response = self.client.get('/item/query/test_delete_item_query')
- res_json = json.loads(response.data.decode('utf-8'))
+ response = self.client.get("/item/query/test_delete_item_query")
+ res_json = json.loads(response.data.decode("utf-8"))
self.assertEqual(response.status_code, 200)
- self.assertEqual(len(res_json['results']), 1)
+ self.assertEqual(len(res_json["results"]), 1)
# Delete item by query
- response = self.client.delete('/item/query/test_delete_item_query')
- res_json = json.loads(response.data.decode('utf-8'))
+ response = self.client.delete("/item/query/test_delete_item_query")
+ res_json = json.loads(response.data.decode("utf-8"))
self.assertEqual(response.status_code, 200)
# Check the item has gone
- response = self.client.get('/item/query/test_delete_item_query')
- res_json = json.loads(response.data.decode('utf-8'))
+ response = self.client.get("/item/query/test_delete_item_query")
+ res_json = json.loads(response.data.decode("utf-8"))
self.assertEqual(response.status_code, 200)
- self.assertEqual(len(res_json['results']), 0)
+ self.assertEqual(len(res_json["results"]), 0)
def test_delete_item_all_fails(self):
- """ DELETE is not supported for list all """
+ """DELETE is not supported for list all"""
- web.app.config['READONLY'] = False
+ web.app.config["READONLY"] = False
# Delete all items
- response = self.client.delete('/item/')
+ response = self.client.delete("/item/")
self.assertEqual(response.status_code, 405)
# Note: if this fails, all items have gone and rest of
# tests will fail!
def test_delete_item_id_readonly(self):
-
- web.app.config['READONLY'] = True
+ web.app.config["READONLY"] = True
# Create a temporary item
- item_id = self.lib.add(Item(title='test_delete_item_id_ro',
- test_delete_item_id_ro=1))
+ item_id = self.lib.add(
+ Item(title="test_delete_item_id_ro", test_delete_item_id_ro=1)
+ )
# Check we can find the temporary item we just created
- response = self.client.get('/item/' + str(item_id))
- res_json = json.loads(response.data.decode('utf-8'))
+ response = self.client.get("/item/" + str(item_id))
+ res_json = json.loads(response.data.decode("utf-8"))
self.assertEqual(response.status_code, 200)
- self.assertEqual(res_json['id'], item_id)
+ self.assertEqual(res_json["id"], item_id)
# Try to delete item by id
- response = self.client.delete('/item/' + str(item_id))
+ response = self.client.delete("/item/" + str(item_id))
self.assertEqual(response.status_code, 405)
# Check the item has not gone
- response = self.client.get('/item/' + str(item_id))
- res_json = json.loads(response.data.decode('utf-8'))
+ response = self.client.get("/item/" + str(item_id))
+ res_json = json.loads(response.data.decode("utf-8"))
self.assertEqual(response.status_code, 200)
- self.assertEqual(res_json['id'], item_id)
+ self.assertEqual(res_json["id"], item_id)
# Remove it
self.lib.get_item(item_id).remove()
def test_delete_item_query_readonly(self):
-
- web.app.config['READONLY'] = True
+ web.app.config["READONLY"] = True
# Create a temporary item
- item_id = self.lib.add(Item(title='test_delete_item_q_ro',
- test_delete_item_q_ro=1))
+ item_id = self.lib.add(
+ Item(title="test_delete_item_q_ro", test_delete_item_q_ro=1)
+ )
# Check we can find the temporary item we just created
- response = self.client.get('/item/query/test_delete_item_q_ro')
- res_json = json.loads(response.data.decode('utf-8'))
+ response = self.client.get("/item/query/test_delete_item_q_ro")
+ res_json = json.loads(response.data.decode("utf-8"))
self.assertEqual(response.status_code, 200)
- self.assertEqual(len(res_json['results']), 1)
+ self.assertEqual(len(res_json["results"]), 1)
# Try to delete item by query
- response = self.client.delete('/item/query/test_delete_item_q_ro')
+ response = self.client.delete("/item/query/test_delete_item_q_ro")
self.assertEqual(response.status_code, 405)
# Check the item has not gone
- response = self.client.get('/item/query/test_delete_item_q_ro')
- res_json = json.loads(response.data.decode('utf-8'))
+ response = self.client.get("/item/query/test_delete_item_q_ro")
+ res_json = json.loads(response.data.decode("utf-8"))
self.assertEqual(response.status_code, 200)
- self.assertEqual(len(res_json['results']), 1)
+ self.assertEqual(len(res_json["results"]), 1)
# Remove it
self.lib.get_item(item_id).remove()
def test_delete_album_id(self):
-
- web.app.config['READONLY'] = False
+ web.app.config["READONLY"] = False
# Create a temporary album
- album_id = self.lib.add(Album(album='test_delete_album_id',
- test_delete_album_id=1))
+ album_id = self.lib.add(
+ Album(album="test_delete_album_id", test_delete_album_id=1)
+ )
# Check we can find the temporary album we just created
- response = self.client.get('/album/' + str(album_id))
- res_json = json.loads(response.data.decode('utf-8'))
+ response = self.client.get("/album/" + str(album_id))
+ res_json = json.loads(response.data.decode("utf-8"))
self.assertEqual(response.status_code, 200)
- self.assertEqual(res_json['id'], album_id)
+ self.assertEqual(res_json["id"], album_id)
# Delete album by id
- response = self.client.delete('/album/' + str(album_id))
- res_json = json.loads(response.data.decode('utf-8'))
+ response = self.client.delete("/album/" + str(album_id))
+ res_json = json.loads(response.data.decode("utf-8"))
self.assertEqual(response.status_code, 200)
# Check the album has gone
- response = self.client.get('/album/' + str(album_id))
+ response = self.client.get("/album/" + str(album_id))
self.assertEqual(response.status_code, 404)
# Note: if this fails, the album may still be around
# and may cause other tests to fail
def test_delete_album_query(self):
-
- web.app.config['READONLY'] = False
+ web.app.config["READONLY"] = False
# Create a temporary album
- self.lib.add(Album(album='test_delete_album_query',
- test_delete_album_query=1))
+ self.lib.add(
+ Album(album="test_delete_album_query", test_delete_album_query=1)
+ )
# Check we can find the temporary album we just created
- response = self.client.get('/album/query/test_delete_album_query')
- res_json = json.loads(response.data.decode('utf-8'))
+ response = self.client.get("/album/query/test_delete_album_query")
+ res_json = json.loads(response.data.decode("utf-8"))
self.assertEqual(response.status_code, 200)
- self.assertEqual(len(res_json['results']), 1)
+ self.assertEqual(len(res_json["results"]), 1)
# Delete album
- response = self.client.delete('/album/query/test_delete_album_query')
- res_json = json.loads(response.data.decode('utf-8'))
+ response = self.client.delete("/album/query/test_delete_album_query")
+ res_json = json.loads(response.data.decode("utf-8"))
self.assertEqual(response.status_code, 200)
# Check the album has gone
- response = self.client.get('/album/query/test_delete_album_query')
- res_json = json.loads(response.data.decode('utf-8'))
+ response = self.client.get("/album/query/test_delete_album_query")
+ res_json = json.loads(response.data.decode("utf-8"))
self.assertEqual(response.status_code, 200)
- self.assertEqual(len(res_json['results']), 0)
+ self.assertEqual(len(res_json["results"]), 0)
def test_delete_album_all_fails(self):
- """ DELETE is not supported for list all """
+ """DELETE is not supported for list all"""
- web.app.config['READONLY'] = False
+ web.app.config["READONLY"] = False
# Delete all albums
- response = self.client.delete('/album/')
+ response = self.client.delete("/album/")
self.assertEqual(response.status_code, 405)
# Note: if this fails, all albums have gone and rest of
# tests will fail!
def test_delete_album_id_readonly(self):
-
- web.app.config['READONLY'] = True
+ web.app.config["READONLY"] = True
# Create a temporary album
- album_id = self.lib.add(Album(album='test_delete_album_id_ro',
- test_delete_album_id_ro=1))
+ album_id = self.lib.add(
+ Album(album="test_delete_album_id_ro", test_delete_album_id_ro=1)
+ )
# Check we can find the temporary album we just created
- response = self.client.get('/album/' + str(album_id))
- res_json = json.loads(response.data.decode('utf-8'))
+ response = self.client.get("/album/" + str(album_id))
+ res_json = json.loads(response.data.decode("utf-8"))
self.assertEqual(response.status_code, 200)
- self.assertEqual(res_json['id'], album_id)
+ self.assertEqual(res_json["id"], album_id)
# Try to delete album by id
- response = self.client.delete('/album/' + str(album_id))
+ response = self.client.delete("/album/" + str(album_id))
self.assertEqual(response.status_code, 405)
# Check the item has not gone
- response = self.client.get('/album/' + str(album_id))
- res_json = json.loads(response.data.decode('utf-8'))
+ response = self.client.get("/album/" + str(album_id))
+ res_json = json.loads(response.data.decode("utf-8"))
self.assertEqual(response.status_code, 200)
- self.assertEqual(res_json['id'], album_id)
+ self.assertEqual(res_json["id"], album_id)
# Remove it
self.lib.get_album(album_id).remove()
def test_delete_album_query_readonly(self):
-
- web.app.config['READONLY'] = True
+ web.app.config["READONLY"] = True
# Create a temporary album
- album_id = self.lib.add(Album(album='test_delete_album_query_ro',
- test_delete_album_query_ro=1))
+ album_id = self.lib.add(
+ Album(
+ album="test_delete_album_query_ro", test_delete_album_query_ro=1
+ )
+ )
# Check we can find the temporary album we just created
- response = self.client.get('/album/query/test_delete_album_query_ro')
- res_json = json.loads(response.data.decode('utf-8'))
+ response = self.client.get("/album/query/test_delete_album_query_ro")
+ res_json = json.loads(response.data.decode("utf-8"))
self.assertEqual(response.status_code, 200)
- self.assertEqual(len(res_json['results']), 1)
+ self.assertEqual(len(res_json["results"]), 1)
# Try to delete album
- response = self.client.delete(
- '/album/query/test_delete_album_query_ro'
- )
+ response = self.client.delete("/album/query/test_delete_album_query_ro")
self.assertEqual(response.status_code, 405)
# Check the album has not gone
- response = self.client.get('/album/query/test_delete_album_query_ro')
- res_json = json.loads(response.data.decode('utf-8'))
+ response = self.client.get("/album/query/test_delete_album_query_ro")
+ res_json = json.loads(response.data.decode("utf-8"))
self.assertEqual(response.status_code, 200)
- self.assertEqual(len(res_json['results']), 1)
+ self.assertEqual(len(res_json["results"]), 1)
# Remove it
self.lib.get_album(album_id).remove()
@@ -600,41 +593,44 @@ def test_delete_album_query_readonly(self):
def test_patch_item_id(self):
# Note: PATCH is currently only implemented for track items, not albums
- web.app.config['READONLY'] = False
+ web.app.config["READONLY"] = False
# Create a temporary item
- item_id = self.lib.add(Item(title='test_patch_item_id',
- test_patch_f1=1,
- test_patch_f2="Old"))
+ item_id = self.lib.add(
+ Item(
+ title="test_patch_item_id", test_patch_f1=1, test_patch_f2="Old"
+ )
+ )
# Check we can find the temporary item we just created
- response = self.client.get('/item/' + str(item_id))
- res_json = json.loads(response.data.decode('utf-8'))
+ response = self.client.get("/item/" + str(item_id))
+ res_json = json.loads(response.data.decode("utf-8"))
self.assertEqual(response.status_code, 200)
- self.assertEqual(res_json['id'], item_id)
+ self.assertEqual(res_json["id"], item_id)
self.assertEqual(
- [res_json['test_patch_f1'], res_json['test_patch_f2']],
- ['1', 'Old'])
+ [res_json["test_patch_f1"], res_json["test_patch_f2"]], ["1", "Old"]
+ )
# Patch item by id
# patch_json = json.JSONEncoder().encode({"test_patch_f2": "New"}]})
- response = self.client.patch('/item/' + str(item_id),
- json={"test_patch_f2": "New"})
- res_json = json.loads(response.data.decode('utf-8'))
+ response = self.client.patch(
+ "/item/" + str(item_id), json={"test_patch_f2": "New"}
+ )
+ res_json = json.loads(response.data.decode("utf-8"))
self.assertEqual(response.status_code, 200)
- self.assertEqual(res_json['id'], item_id)
+ self.assertEqual(res_json["id"], item_id)
self.assertEqual(
- [res_json['test_patch_f1'], res_json['test_patch_f2']],
- ['1', 'New'])
+ [res_json["test_patch_f1"], res_json["test_patch_f2"]], ["1", "New"]
+ )
# Check the update has really worked
- response = self.client.get('/item/' + str(item_id))
- res_json = json.loads(response.data.decode('utf-8'))
+ response = self.client.get("/item/" + str(item_id))
+ res_json = json.loads(response.data.decode("utf-8"))
self.assertEqual(response.status_code, 200)
- self.assertEqual(res_json['id'], item_id)
+ self.assertEqual(res_json["id"], item_id)
self.assertEqual(
- [res_json['test_patch_f1'], res_json['test_patch_f2']],
- ['1', 'New'])
+ [res_json["test_patch_f1"], res_json["test_patch_f2"]], ["1", "New"]
+ )
# Remove the item
self.lib.get_item(item_id).remove()
@@ -642,38 +638,43 @@ def test_patch_item_id(self):
def test_patch_item_id_readonly(self):
# Note: PATCH is currently only implemented for track items, not albums
- web.app.config['READONLY'] = True
+ web.app.config["READONLY"] = True
# Create a temporary item
- item_id = self.lib.add(Item(title='test_patch_item_id_ro',
- test_patch_f1=2,
- test_patch_f2="Old"))
+ item_id = self.lib.add(
+ Item(
+ title="test_patch_item_id_ro",
+ test_patch_f1=2,
+ test_patch_f2="Old",
+ )
+ )
# Check we can find the temporary item we just created
- response = self.client.get('/item/' + str(item_id))
- res_json = json.loads(response.data.decode('utf-8'))
+ response = self.client.get("/item/" + str(item_id))
+ res_json = json.loads(response.data.decode("utf-8"))
self.assertEqual(response.status_code, 200)
- self.assertEqual(res_json['id'], item_id)
+ self.assertEqual(res_json["id"], item_id)
self.assertEqual(
- [res_json['test_patch_f1'], res_json['test_patch_f2']],
- ['2', 'Old'])
+ [res_json["test_patch_f1"], res_json["test_patch_f2"]], ["2", "Old"]
+ )
# Patch item by id
# patch_json = json.JSONEncoder().encode({"test_patch_f2": "New"})
- response = self.client.patch('/item/' + str(item_id),
- json={"test_patch_f2": "New"})
+ response = self.client.patch(
+ "/item/" + str(item_id), json={"test_patch_f2": "New"}
+ )
self.assertEqual(response.status_code, 405)
# Remove the item
self.lib.get_item(item_id).remove()
def test_get_item_file(self):
- ipath = os.path.join(self.temp_dir, b'testfile2.mp3')
- shutil.copy(os.path.join(_common.RSRC, b'full.mp3'), ipath)
+ ipath = os.path.join(self.temp_dir, b"testfile2.mp3")
+ shutil.copy(os.path.join(_common.RSRC, b"full.mp3"), ipath)
self.assertTrue(os.path.exists(ipath))
item_id = self.lib.add(Item.from_path(ipath))
- response = self.client.get('/item/' + str(item_id) + '/file')
+ response = self.client.get("/item/" + str(item_id) + "/file")
self.assertEqual(response.status_code, 200)
@@ -682,5 +683,5 @@ def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/plugins/test_zero.py b/test/plugins/test_zero.py
index b48367b415..50b00a7076 100644
--- a/test/plugins/test_zero.py
+++ b/test/plugins/test_zero.py
@@ -4,19 +4,20 @@
import unittest
from test.helper import TestHelper, control_stdin
-from beets.library import Item
-from beetsplug.zero import ZeroPlugin
from mediafile import MediaFile
+
+from beets.library import Item
from beets.util import syspath
+from beetsplug.zero import ZeroPlugin
class ZeroPluginTest(unittest.TestCase, TestHelper):
def setUp(self):
self.setup_beets()
- self.config['zero'] = {
- 'fields': [],
- 'keep_fields': [],
- 'update_database': False,
+ self.config["zero"] = {
+ "fields": [],
+ "keep_fields": [],
+ "update_database": False,
}
def tearDown(self):
@@ -25,166 +26,157 @@ def tearDown(self):
self.unload_plugins()
def test_no_patterns(self):
- self.config['zero']['fields'] = ['comments', 'month']
+ self.config["zero"]["fields"] = ["comments", "month"]
item = self.add_item_fixture(
- comments='test comment',
- title='Title',
+ comments="test comment",
+ title="Title",
month=1,
year=2000,
)
item.write()
- self.load_plugins('zero')
+ self.load_plugins("zero")
item.write()
mf = MediaFile(syspath(item.path))
self.assertIsNone(mf.comments)
self.assertIsNone(mf.month)
- self.assertEqual(mf.title, 'Title')
+ self.assertEqual(mf.title, "Title")
self.assertEqual(mf.year, 2000)
def test_pattern_match(self):
- self.config['zero']['fields'] = ['comments']
- self.config['zero']['comments'] = ['encoded by']
+ self.config["zero"]["fields"] = ["comments"]
+ self.config["zero"]["comments"] = ["encoded by"]
- item = self.add_item_fixture(comments='encoded by encoder')
+ item = self.add_item_fixture(comments="encoded by encoder")
item.write()
- self.load_plugins('zero')
+ self.load_plugins("zero")
item.write()
mf = MediaFile(syspath(item.path))
self.assertIsNone(mf.comments)
def test_pattern_nomatch(self):
- self.config['zero']['fields'] = ['comments']
- self.config['zero']['comments'] = ['encoded by']
+ self.config["zero"]["fields"] = ["comments"]
+ self.config["zero"]["comments"] = ["encoded by"]
- item = self.add_item_fixture(comments='recorded at place')
+ item = self.add_item_fixture(comments="recorded at place")
item.write()
- self.load_plugins('zero')
+ self.load_plugins("zero")
item.write()
mf = MediaFile(syspath(item.path))
- self.assertEqual(mf.comments, 'recorded at place')
+ self.assertEqual(mf.comments, "recorded at place")
def test_do_not_change_database(self):
- self.config['zero']['fields'] = ['year']
+ self.config["zero"]["fields"] = ["year"]
item = self.add_item_fixture(year=2000)
item.write()
- self.load_plugins('zero')
+ self.load_plugins("zero")
item.write()
- self.assertEqual(item['year'], 2000)
+ self.assertEqual(item["year"], 2000)
def test_change_database(self):
- self.config['zero']['fields'] = ['year']
- self.config['zero']['update_database'] = True
+ self.config["zero"]["fields"] = ["year"]
+ self.config["zero"]["update_database"] = True
item = self.add_item_fixture(year=2000)
item.write()
- self.load_plugins('zero')
+ self.load_plugins("zero")
item.write()
- self.assertEqual(item['year'], 0)
+ self.assertEqual(item["year"], 0)
def test_album_art(self):
- self.config['zero']['fields'] = ['images']
+ self.config["zero"]["fields"] = ["images"]
- path = self.create_mediafile_fixture(images=['jpg'])
+ path = self.create_mediafile_fixture(images=["jpg"])
item = Item.from_path(path)
- self.load_plugins('zero')
+ self.load_plugins("zero")
item.write()
mf = MediaFile(syspath(path))
self.assertFalse(mf.images)
def test_auto_false(self):
- self.config['zero']['fields'] = ['year']
- self.config['zero']['update_database'] = True
- self.config['zero']['auto'] = False
+ self.config["zero"]["fields"] = ["year"]
+ self.config["zero"]["update_database"] = True
+ self.config["zero"]["auto"] = False
item = self.add_item_fixture(year=2000)
item.write()
- self.load_plugins('zero')
+ self.load_plugins("zero")
item.write()
- self.assertEqual(item['year'], 2000)
+ self.assertEqual(item["year"], 2000)
def test_subcommand_update_database_true(self):
item = self.add_item_fixture(
- year=2016,
- day=13,
- month=3,
- comments='test comment'
+ year=2016, day=13, month=3, comments="test comment"
)
item.write()
item_id = item.id
- self.config['zero']['fields'] = ['comments']
- self.config['zero']['update_database'] = True
- self.config['zero']['auto'] = False
+ self.config["zero"]["fields"] = ["comments"]
+ self.config["zero"]["update_database"] = True
+ self.config["zero"]["auto"] = False
- self.load_plugins('zero')
- with control_stdin('y'):
- self.run_command('zero')
+ self.load_plugins("zero")
+ with control_stdin("y"):
+ self.run_command("zero")
mf = MediaFile(syspath(item.path))
item = self.lib.get_item(item_id)
- self.assertEqual(item['year'], 2016)
+ self.assertEqual(item["year"], 2016)
self.assertEqual(mf.year, 2016)
self.assertEqual(mf.comments, None)
- self.assertEqual(item['comments'], '')
+ self.assertEqual(item["comments"], "")
def test_subcommand_update_database_false(self):
item = self.add_item_fixture(
- year=2016,
- day=13,
- month=3,
- comments='test comment'
+ year=2016, day=13, month=3, comments="test comment"
)
item.write()
item_id = item.id
- self.config['zero']['fields'] = ['comments']
- self.config['zero']['update_database'] = False
- self.config['zero']['auto'] = False
+ self.config["zero"]["fields"] = ["comments"]
+ self.config["zero"]["update_database"] = False
+ self.config["zero"]["auto"] = False
- self.load_plugins('zero')
- with control_stdin('y'):
- self.run_command('zero')
+ self.load_plugins("zero")
+ with control_stdin("y"):
+ self.run_command("zero")
mf = MediaFile(syspath(item.path))
item = self.lib.get_item(item_id)
- self.assertEqual(item['year'], 2016)
+ self.assertEqual(item["year"], 2016)
self.assertEqual(mf.year, 2016)
- self.assertEqual(item['comments'], 'test comment')
+ self.assertEqual(item["comments"], "test comment")
self.assertEqual(mf.comments, None)
def test_subcommand_query_include(self):
item = self.add_item_fixture(
- year=2016,
- day=13,
- month=3,
- comments='test comment'
+ year=2016, day=13, month=3, comments="test comment"
)
item.write()
- self.config['zero']['fields'] = ['comments']
- self.config['zero']['update_database'] = False
- self.config['zero']['auto'] = False
+ self.config["zero"]["fields"] = ["comments"]
+ self.config["zero"]["update_database"] = False
+ self.config["zero"]["auto"] = False
- self.load_plugins('zero')
- self.run_command('zero', 'year: 2016')
+ self.load_plugins("zero")
+ self.run_command("zero", "year: 2016")
mf = MediaFile(syspath(item.path))
@@ -193,25 +185,22 @@ def test_subcommand_query_include(self):
def test_subcommand_query_exclude(self):
item = self.add_item_fixture(
- year=2016,
- day=13,
- month=3,
- comments='test comment'
+ year=2016, day=13, month=3, comments="test comment"
)
item.write()
- self.config['zero']['fields'] = ['comments']
- self.config['zero']['update_database'] = False
- self.config['zero']['auto'] = False
+ self.config["zero"]["fields"] = ["comments"]
+ self.config["zero"]["update_database"] = False
+ self.config["zero"]["auto"] = False
- self.load_plugins('zero')
- self.run_command('zero', 'year: 0000')
+ self.load_plugins("zero")
+ self.run_command("zero", "year: 0000")
mf = MediaFile(syspath(item.path))
self.assertEqual(mf.year, 2016)
- self.assertEqual(mf.comments, 'test comment')
+ self.assertEqual(mf.comments, "test comment")
def test_no_fields(self):
item = self.add_item_fixture(year=2016)
@@ -221,13 +210,13 @@ def test_no_fields(self):
item_id = item.id
- self.load_plugins('zero')
- with control_stdin('y'):
- self.run_command('zero')
+ self.load_plugins("zero")
+ with control_stdin("y"):
+ self.run_command("zero")
item = self.lib.get_item(item_id)
- self.assertEqual(item['year'], 2016)
+ self.assertEqual(item["year"], 2016)
self.assertEqual(mediafile.year, 2016)
def test_whitelist_and_blacklist(self):
@@ -237,80 +226,78 @@ def test_whitelist_and_blacklist(self):
self.assertEqual(mf.year, 2016)
item_id = item.id
- self.config['zero']['fields'] = ['year']
- self.config['zero']['keep_fields'] = ['comments']
+ self.config["zero"]["fields"] = ["year"]
+ self.config["zero"]["keep_fields"] = ["comments"]
- self.load_plugins('zero')
- with control_stdin('y'):
- self.run_command('zero')
+ self.load_plugins("zero")
+ with control_stdin("y"):
+ self.run_command("zero")
item = self.lib.get_item(item_id)
- self.assertEqual(item['year'], 2016)
+ self.assertEqual(item["year"], 2016)
self.assertEqual(mf.year, 2016)
def test_keep_fields(self):
- item = self.add_item_fixture(year=2016, comments='test comment')
- self.config['zero']['keep_fields'] = ['year']
- self.config['zero']['fields'] = None
- self.config['zero']['update_database'] = True
+ item = self.add_item_fixture(year=2016, comments="test comment")
+ self.config["zero"]["keep_fields"] = ["year"]
+ self.config["zero"]["fields"] = None
+ self.config["zero"]["update_database"] = True
tags = {
- 'comments': 'test comment',
- 'year': 2016,
+ "comments": "test comment",
+ "year": 2016,
}
- self.load_plugins('zero')
+ self.load_plugins("zero")
z = ZeroPlugin()
z.write_event(item, item.path, tags)
- self.assertEqual(tags['comments'], None)
- self.assertEqual(tags['year'], 2016)
+ self.assertEqual(tags["comments"], None)
+ self.assertEqual(tags["year"], 2016)
def test_keep_fields_removes_preserved_tags(self):
- self.config['zero']['keep_fields'] = ['year']
- self.config['zero']['fields'] = None
- self.config['zero']['update_database'] = True
+ self.config["zero"]["keep_fields"] = ["year"]
+ self.config["zero"]["fields"] = None
+ self.config["zero"]["update_database"] = True
z = ZeroPlugin()
- self.assertNotIn('id', z.fields_to_progs)
+ self.assertNotIn("id", z.fields_to_progs)
def test_fields_removes_preserved_tags(self):
- self.config['zero']['fields'] = ['year id']
- self.config['zero']['update_database'] = True
+ self.config["zero"]["fields"] = ["year id"]
+ self.config["zero"]["update_database"] = True
z = ZeroPlugin()
- self.assertNotIn('id', z.fields_to_progs)
+ self.assertNotIn("id", z.fields_to_progs)
def test_empty_query_n_response_no_changes(self):
item = self.add_item_fixture(
- year=2016,
- day=13,
- month=3,
- comments='test comment'
+ year=2016, day=13, month=3, comments="test comment"
)
item.write()
item_id = item.id
- self.config['zero']['fields'] = ['comments']
- self.config['zero']['update_database'] = True
- self.config['zero']['auto'] = False
+ self.config["zero"]["fields"] = ["comments"]
+ self.config["zero"]["update_database"] = True
+ self.config["zero"]["auto"] = False
- self.load_plugins('zero')
- with control_stdin('n'):
- self.run_command('zero')
+ self.load_plugins("zero")
+ with control_stdin("n"):
+ self.run_command("zero")
mf = MediaFile(syspath(item.path))
item = self.lib.get_item(item_id)
- self.assertEqual(item['year'], 2016)
+ self.assertEqual(item["year"], 2016)
self.assertEqual(mf.year, 2016)
- self.assertEqual(mf.comments, 'test comment')
- self.assertEqual(item['comments'], 'test comment')
+ self.assertEqual(mf.comments, "test comment")
+ self.assertEqual(item["comments"], "test comment")
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/rsrc/beetsplug/test.py b/test/rsrc/beetsplug/test.py
index c57d3f517d..70e5e2daf0 100644
--- a/test/rsrc/beetsplug/test.py
+++ b/test/rsrc/beetsplug/test.py
@@ -1,5 +1,5 @@
-from beets.plugins import BeetsPlugin
from beets import ui
+from beets.plugins import BeetsPlugin
class TestPlugin(BeetsPlugin):
@@ -8,12 +8,12 @@ def __init__(self):
self.is_test_plugin = True
def commands(self):
- test = ui.Subcommand('test')
+ test = ui.Subcommand("test")
test.func = lambda *args: None
# Used in CompletionTest
- test.parser.add_option('-o', '--option', dest='my_opt')
+ test.parser.add_option("-o", "--option", dest="my_opt")
- plugin = ui.Subcommand('plugin')
+ plugin = ui.Subcommand("plugin")
plugin.func = lambda *args: None
return [test, plugin]
diff --git a/test/rsrc/convert_stub.py b/test/rsrc/convert_stub.py
index 409c3e336d..14282c8ce6 100755
--- a/test/rsrc/convert_stub.py
+++ b/test/rsrc/convert_stub.py
@@ -4,29 +4,28 @@
a specified text tag.
"""
-import sys
import locale
+import sys
# From `beets.util`.
def arg_encoding():
try:
- return locale.getdefaultlocale()[1] or 'utf-8'
+ return locale.getdefaultlocale()[1] or "utf-8"
except ValueError:
- return 'utf-8'
+ return "utf-8"
def convert(in_file, out_file, tag):
- """Copy `in_file` to `out_file` and append the string `tag`.
- """
+ """Copy `in_file` to `out_file` and append the string `tag`."""
if not isinstance(tag, bytes):
- tag = tag.encode('utf-8')
+ tag = tag.encode("utf-8")
- with open(out_file, 'wb') as out_f:
- with open(in_file, 'rb') as in_f:
+ with open(out_file, "wb") as out_f:
+ with open(in_file, "rb") as in_f:
out_f.write(in_f.read())
out_f.write(tag)
-if __name__ == '__main__':
+if __name__ == "__main__":
convert(sys.argv[1], sys.argv[2], sys.argv[3])
diff --git a/test/test_art_resize.py b/test/test_art_resize.py
index 9660d96a2a..e14ff42efb 100644
--- a/test/test_art_resize.py
+++ b/test/test_art_resize.py
@@ -15,12 +15,12 @@
"""Tests for image resizing based on filesize."""
-import unittest
-from unittest.mock import patch
import os
-
+import unittest
from test import _common
from test.helper import TestHelper
+from unittest.mock import patch
+
from beets.util import command_output, syspath
from beets.util.artresizer import IMBackend, PILBackend
@@ -35,9 +35,9 @@ def __init__(self):
"""Init a dummy backend class for mocked ImageMagick tests."""
self.version = (7, 0, 0)
self.legacy = False
- self.convert_cmd = ['magick']
- self.identify_cmd = ['magick', 'identify']
- self.compare_cmd = ['magick', 'compare']
+ self.convert_cmd = ["magick"]
+ self.identify_cmd = ["magick", "identify"]
+ self.compare_cmd = ["magick", "compare"]
class DummyPILBackend(PILBackend):
@@ -83,8 +83,9 @@ def _test_img_resize(self, backend):
)
self.assertExists(im_a)
# target size was achieved
- self.assertLess(os.stat(syspath(im_a)).st_size,
- os.stat(syspath(im_95_qual)).st_size)
+ self.assertLess(
+ os.stat(syspath(im_a)).st_size, os.stat(syspath(im_95_qual)).st_size
+ )
# Attempt with lower initial quality
im_75_qual = backend.resize(
@@ -103,8 +104,9 @@ def _test_img_resize(self, backend):
)
self.assertExists(im_b)
# Check high (initial) quality still gives a smaller filesize
- self.assertLess(os.stat(syspath(im_b)).st_size,
- os.stat(syspath(im_75_qual)).st_size)
+ self.assertLess(
+ os.stat(syspath(im_b)).st_size, os.stat(syspath(im_75_qual)).st_size
+ )
@unittest.skipUnless(PILBackend.available(), "PIL not available")
def test_pil_file_resize(self):
@@ -125,8 +127,9 @@ def test_pil_file_deinterlace(self):
"""
path = PILBackend().deinterlace(self.IMG_225x225)
from PIL import Image
+
with Image.open(path) as img:
- self.assertFalse('progression' in img.info)
+ self.assertFalse("progression" in img.info)
@unittest.skipUnless(IMBackend.available(), "ImageMagick not available")
def test_im_file_deinterlace(self):
@@ -138,12 +141,14 @@ def test_im_file_deinterlace(self):
im = IMBackend()
path = im.deinterlace(self.IMG_225x225)
cmd = im.identify_cmd + [
- '-format', '%[interlace]', syspath(path, prefix=False),
+ "-format",
+ "%[interlace]",
+ syspath(path, prefix=False),
]
out = command_output(cmd).stdout
- self.assertTrue(out == b'None')
+ self.assertTrue(out == b"None")
- @patch('beets.util.artresizer.util')
+ @patch("beets.util.artresizer.util")
def test_write_metadata_im(self, mock_util):
"""Test writing image metadata."""
metadata = {"a": "A", "b": "B"}
diff --git a/test/test_autotag.py b/test/test_autotag.py
index ae607cb19a..c0268910d9 100644
--- a/test/test_autotag.py
+++ b/test/test_autotag.py
@@ -17,15 +17,13 @@
import re
import unittest
-
from test import _common
-from beets import autotag
-from beets.autotag import match
+
+from beets import autotag, config
+from beets.autotag import AlbumInfo, TrackInfo, match
from beets.autotag.hooks import Distance, string_dist
from beets.library import Item
from beets.util import plurality
-from beets.autotag import AlbumInfo, TrackInfo
-from beets import config
class PluralityTest(_common.TestCase):
@@ -52,63 +50,91 @@ def test_plurality_empty_sequence_raises_error(self):
plurality([])
def test_current_metadata_finds_pluralities(self):
- items = [Item(artist='The Beetles', album='The White Album'),
- Item(artist='The Beatles', album='The White Album'),
- Item(artist='The Beatles', album='Teh White Album')]
+ items = [
+ Item(artist="The Beetles", album="The White Album"),
+ Item(artist="The Beatles", album="The White Album"),
+ Item(artist="The Beatles", album="Teh White Album"),
+ ]
likelies, consensus = match.current_metadata(items)
- self.assertEqual(likelies['artist'], 'The Beatles')
- self.assertEqual(likelies['album'], 'The White Album')
- self.assertFalse(consensus['artist'])
+ self.assertEqual(likelies["artist"], "The Beatles")
+ self.assertEqual(likelies["album"], "The White Album")
+ self.assertFalse(consensus["artist"])
def test_current_metadata_artist_consensus(self):
- items = [Item(artist='The Beatles', album='The White Album'),
- Item(artist='The Beatles', album='The White Album'),
- Item(artist='The Beatles', album='Teh White Album')]
+ items = [
+ Item(artist="The Beatles", album="The White Album"),
+ Item(artist="The Beatles", album="The White Album"),
+ Item(artist="The Beatles", album="Teh White Album"),
+ ]
likelies, consensus = match.current_metadata(items)
- self.assertEqual(likelies['artist'], 'The Beatles')
- self.assertEqual(likelies['album'], 'The White Album')
- self.assertTrue(consensus['artist'])
+ self.assertEqual(likelies["artist"], "The Beatles")
+ self.assertEqual(likelies["album"], "The White Album")
+ self.assertTrue(consensus["artist"])
def test_albumartist_consensus(self):
- items = [Item(artist='tartist1', album='album',
- albumartist='aartist'),
- Item(artist='tartist2', album='album',
- albumartist='aartist'),
- Item(artist='tartist3', album='album',
- albumartist='aartist')]
+ items = [
+ Item(artist="tartist1", album="album", albumartist="aartist"),
+ Item(artist="tartist2", album="album", albumartist="aartist"),
+ Item(artist="tartist3", album="album", albumartist="aartist"),
+ ]
likelies, consensus = match.current_metadata(items)
- self.assertEqual(likelies['artist'], 'aartist')
- self.assertFalse(consensus['artist'])
+ self.assertEqual(likelies["artist"], "aartist")
+ self.assertFalse(consensus["artist"])
def test_current_metadata_likelies(self):
- fields = ['artist', 'album', 'albumartist', 'year', 'disctotal',
- 'mb_albumid', 'label', 'catalognum', 'country', 'media',
- 'albumdisambig']
- items = [Item(**{f: '{}_{}'.format(f, i or 1) for f in fields})
- for i in range(5)]
+ fields = [
+ "artist",
+ "album",
+ "albumartist",
+ "year",
+ "disctotal",
+ "mb_albumid",
+ "label",
+ "catalognum",
+ "country",
+ "media",
+ "albumdisambig",
+ ]
+ items = [
+ Item(**{f: "{}_{}".format(f, i or 1) for f in fields})
+ for i in range(5)
+ ]
likelies, _ = match.current_metadata(items)
for f in fields:
if isinstance(likelies[f], int):
self.assertEqual(likelies[f], 0)
else:
- self.assertEqual(likelies[f], '%s_1' % f)
+ self.assertEqual(likelies[f], "%s_1" % f)
-def _make_item(title, track, artist='some artist'):
- return Item(title=title, track=track,
- artist=artist, album='some album',
- length=1,
- mb_trackid='', mb_albumid='', mb_artistid='')
+def _make_item(title, track, artist="some artist"):
+ return Item(
+ title=title,
+ track=track,
+ artist=artist,
+ album="some album",
+ length=1,
+ mb_trackid="",
+ mb_albumid="",
+ mb_artistid="",
+ )
def _make_trackinfo():
return [
- TrackInfo(title='one', track_id=None, artist='some artist',
- length=1, index=1),
- TrackInfo(title='two', track_id=None, artist='some artist',
- length=1, index=2),
- TrackInfo(title='three', track_id=None, artist='some artist',
- length=1, index=3),
+ TrackInfo(
+ title="one", track_id=None, artist="some artist", length=1, index=1
+ ),
+ TrackInfo(
+ title="two", track_id=None, artist="some artist", length=1, index=2
+ ),
+ TrackInfo(
+ title="three",
+ track_id=None,
+ artist="some artist",
+ length=1,
+ index=3,
+ ),
]
@@ -116,7 +142,7 @@ def _clear_weights():
"""Hack around the lazy descriptor used to cache weights for
Distance calculations.
"""
- Distance.__dict__['_weights'].computed = False
+ Distance.__dict__["_weights"].computed = False
class DistanceTest(_common.TestCase):
@@ -126,131 +152,132 @@ def tearDown(self):
def test_add(self):
dist = Distance()
- dist.add('add', 1.0)
- self.assertEqual(dist._penalties, {'add': [1.0]})
+ dist.add("add", 1.0)
+ self.assertEqual(dist._penalties, {"add": [1.0]})
def test_add_equality(self):
dist = Distance()
- dist.add_equality('equality', 'ghi', ['abc', 'def', 'ghi'])
- self.assertEqual(dist._penalties['equality'], [0.0])
+ dist.add_equality("equality", "ghi", ["abc", "def", "ghi"])
+ self.assertEqual(dist._penalties["equality"], [0.0])
- dist.add_equality('equality', 'xyz', ['abc', 'def', 'ghi'])
- self.assertEqual(dist._penalties['equality'], [0.0, 1.0])
+ dist.add_equality("equality", "xyz", ["abc", "def", "ghi"])
+ self.assertEqual(dist._penalties["equality"], [0.0, 1.0])
- dist.add_equality('equality', 'abc', re.compile(r'ABC', re.I))
- self.assertEqual(dist._penalties['equality'], [0.0, 1.0, 0.0])
+ dist.add_equality("equality", "abc", re.compile(r"ABC", re.I))
+ self.assertEqual(dist._penalties["equality"], [0.0, 1.0, 0.0])
def test_add_expr(self):
dist = Distance()
- dist.add_expr('expr', True)
- self.assertEqual(dist._penalties['expr'], [1.0])
+ dist.add_expr("expr", True)
+ self.assertEqual(dist._penalties["expr"], [1.0])
- dist.add_expr('expr', False)
- self.assertEqual(dist._penalties['expr'], [1.0, 0.0])
+ dist.add_expr("expr", False)
+ self.assertEqual(dist._penalties["expr"], [1.0, 0.0])
def test_add_number(self):
dist = Distance()
# Add a full penalty for each number of difference between two numbers.
- dist.add_number('number', 1, 1)
- self.assertEqual(dist._penalties['number'], [0.0])
+ dist.add_number("number", 1, 1)
+ self.assertEqual(dist._penalties["number"], [0.0])
- dist.add_number('number', 1, 2)
- self.assertEqual(dist._penalties['number'], [0.0, 1.0])
+ dist.add_number("number", 1, 2)
+ self.assertEqual(dist._penalties["number"], [0.0, 1.0])
- dist.add_number('number', 2, 1)
- self.assertEqual(dist._penalties['number'], [0.0, 1.0, 1.0])
+ dist.add_number("number", 2, 1)
+ self.assertEqual(dist._penalties["number"], [0.0, 1.0, 1.0])
- dist.add_number('number', -1, 2)
- self.assertEqual(dist._penalties['number'], [0.0, 1.0, 1.0, 1.0,
- 1.0, 1.0])
+ dist.add_number("number", -1, 2)
+ self.assertEqual(
+ dist._penalties["number"], [0.0, 1.0, 1.0, 1.0, 1.0, 1.0]
+ )
def test_add_priority(self):
dist = Distance()
- dist.add_priority('priority', 'abc', 'abc')
- self.assertEqual(dist._penalties['priority'], [0.0])
+ dist.add_priority("priority", "abc", "abc")
+ self.assertEqual(dist._penalties["priority"], [0.0])
- dist.add_priority('priority', 'def', ['abc', 'def'])
- self.assertEqual(dist._penalties['priority'], [0.0, 0.5])
+ dist.add_priority("priority", "def", ["abc", "def"])
+ self.assertEqual(dist._penalties["priority"], [0.0, 0.5])
- dist.add_priority('priority', 'gh', ['ab', 'cd', 'ef',
- re.compile('GH', re.I)])
- self.assertEqual(dist._penalties['priority'], [0.0, 0.5, 0.75])
+ dist.add_priority(
+ "priority", "gh", ["ab", "cd", "ef", re.compile("GH", re.I)]
+ )
+ self.assertEqual(dist._penalties["priority"], [0.0, 0.5, 0.75])
- dist.add_priority('priority', 'xyz', ['abc', 'def'])
- self.assertEqual(dist._penalties['priority'], [0.0, 0.5, 0.75,
- 1.0])
+ dist.add_priority("priority", "xyz", ["abc", "def"])
+ self.assertEqual(dist._penalties["priority"], [0.0, 0.5, 0.75, 1.0])
def test_add_ratio(self):
dist = Distance()
- dist.add_ratio('ratio', 25, 100)
- self.assertEqual(dist._penalties['ratio'], [0.25])
+ dist.add_ratio("ratio", 25, 100)
+ self.assertEqual(dist._penalties["ratio"], [0.25])
- dist.add_ratio('ratio', 10, 5)
- self.assertEqual(dist._penalties['ratio'], [0.25, 1.0])
+ dist.add_ratio("ratio", 10, 5)
+ self.assertEqual(dist._penalties["ratio"], [0.25, 1.0])
- dist.add_ratio('ratio', -5, 5)
- self.assertEqual(dist._penalties['ratio'], [0.25, 1.0, 0.0])
+ dist.add_ratio("ratio", -5, 5)
+ self.assertEqual(dist._penalties["ratio"], [0.25, 1.0, 0.0])
- dist.add_ratio('ratio', 5, 0)
- self.assertEqual(dist._penalties['ratio'], [0.25, 1.0, 0.0, 0.0])
+ dist.add_ratio("ratio", 5, 0)
+ self.assertEqual(dist._penalties["ratio"], [0.25, 1.0, 0.0, 0.0])
def test_add_string(self):
dist = Distance()
- sdist = string_dist('abc', 'bcd')
- dist.add_string('string', 'abc', 'bcd')
- self.assertEqual(dist._penalties['string'], [sdist])
- self.assertNotEqual(dist._penalties['string'], [0])
+ sdist = string_dist("abc", "bcd")
+ dist.add_string("string", "abc", "bcd")
+ self.assertEqual(dist._penalties["string"], [sdist])
+ self.assertNotEqual(dist._penalties["string"], [0])
def test_add_string_none(self):
dist = Distance()
- dist.add_string('string', None, 'string')
- self.assertEqual(dist._penalties['string'], [1])
+ dist.add_string("string", None, "string")
+ self.assertEqual(dist._penalties["string"], [1])
def test_add_string_both_none(self):
dist = Distance()
- dist.add_string('string', None, None)
- self.assertEqual(dist._penalties['string'], [0])
+ dist.add_string("string", None, None)
+ self.assertEqual(dist._penalties["string"], [0])
def test_distance(self):
- config['match']['distance_weights']['album'] = 2.0
- config['match']['distance_weights']['medium'] = 1.0
+ config["match"]["distance_weights"]["album"] = 2.0
+ config["match"]["distance_weights"]["medium"] = 1.0
_clear_weights()
dist = Distance()
- dist.add('album', 0.5)
- dist.add('media', 0.25)
- dist.add('media', 0.75)
+ dist.add("album", 0.5)
+ dist.add("media", 0.25)
+ dist.add("media", 0.75)
self.assertEqual(dist.distance, 0.5)
# __getitem__()
- self.assertEqual(dist['album'], 0.25)
- self.assertEqual(dist['media'], 0.25)
+ self.assertEqual(dist["album"], 0.25)
+ self.assertEqual(dist["media"], 0.25)
def test_max_distance(self):
- config['match']['distance_weights']['album'] = 3.0
- config['match']['distance_weights']['medium'] = 1.0
+ config["match"]["distance_weights"]["album"] = 3.0
+ config["match"]["distance_weights"]["medium"] = 1.0
_clear_weights()
dist = Distance()
- dist.add('album', 0.5)
- dist.add('medium', 0.0)
- dist.add('medium', 0.0)
+ dist.add("album", 0.5)
+ dist.add("medium", 0.0)
+ dist.add("medium", 0.0)
self.assertEqual(dist.max_distance, 5.0)
def test_operators(self):
- config['match']['distance_weights']['source'] = 1.0
- config['match']['distance_weights']['album'] = 2.0
- config['match']['distance_weights']['medium'] = 1.0
+ config["match"]["distance_weights"]["source"] = 1.0
+ config["match"]["distance_weights"]["album"] = 2.0
+ config["match"]["distance_weights"]["medium"] = 1.0
_clear_weights()
dist = Distance()
- dist.add('source', 0.0)
- dist.add('album', 0.5)
- dist.add('medium', 0.25)
- dist.add('medium', 0.75)
+ dist.add("source", 0.0)
+ dist.add("album", 0.5)
+ dist.add("medium", 0.25)
+ dist.add("medium", 0.75)
self.assertEqual(len(dist), 2)
- self.assertEqual(list(dist), [('album', 0.2), ('medium', 0.2)])
+ self.assertEqual(list(dist), [("album", 0.2), ("medium", 0.2)])
self.assertTrue(dist == 0.4)
self.assertTrue(dist < 1.0)
self.assertTrue(dist > 0.0)
@@ -259,71 +286,72 @@ def test_operators(self):
self.assertEqual(float(dist), 0.4)
def test_raw_distance(self):
- config['match']['distance_weights']['album'] = 3.0
- config['match']['distance_weights']['medium'] = 1.0
+ config["match"]["distance_weights"]["album"] = 3.0
+ config["match"]["distance_weights"]["medium"] = 1.0
_clear_weights()
dist = Distance()
- dist.add('album', 0.5)
- dist.add('medium', 0.25)
- dist.add('medium', 0.5)
+ dist.add("album", 0.5)
+ dist.add("medium", 0.25)
+ dist.add("medium", 0.5)
self.assertEqual(dist.raw_distance, 2.25)
def test_items(self):
- config['match']['distance_weights']['album'] = 4.0
- config['match']['distance_weights']['medium'] = 2.0
+ config["match"]["distance_weights"]["album"] = 4.0
+ config["match"]["distance_weights"]["medium"] = 2.0
_clear_weights()
dist = Distance()
- dist.add('album', 0.1875)
- dist.add('medium', 0.75)
- self.assertEqual(dist.items(), [('medium', 0.25), ('album', 0.125)])
+ dist.add("album", 0.1875)
+ dist.add("medium", 0.75)
+ self.assertEqual(dist.items(), [("medium", 0.25), ("album", 0.125)])
# Sort by key if distance is equal.
dist = Distance()
- dist.add('album', 0.375)
- dist.add('medium', 0.75)
- self.assertEqual(dist.items(), [('album', 0.25), ('medium', 0.25)])
+ dist.add("album", 0.375)
+ dist.add("medium", 0.75)
+ self.assertEqual(dist.items(), [("album", 0.25), ("medium", 0.25)])
def test_update(self):
dist1 = Distance()
- dist1.add('album', 0.5)
- dist1.add('media', 1.0)
+ dist1.add("album", 0.5)
+ dist1.add("media", 1.0)
dist2 = Distance()
- dist2.add('album', 0.75)
- dist2.add('album', 0.25)
- dist2.add('media', 0.05)
+ dist2.add("album", 0.75)
+ dist2.add("album", 0.25)
+ dist2.add("media", 0.05)
dist1.update(dist2)
- self.assertEqual(dist1._penalties, {'album': [0.5, 0.75, 0.25],
- 'media': [1.0, 0.05]})
+ self.assertEqual(
+ dist1._penalties, {"album": [0.5, 0.75, 0.25], "media": [1.0, 0.05]}
+ )
class TrackDistanceTest(_common.TestCase):
def test_identical_tracks(self):
- item = _make_item('one', 1)
+ item = _make_item("one", 1)
info = _make_trackinfo()[0]
dist = match.track_distance(item, info, incl_artist=True)
self.assertEqual(dist, 0.0)
def test_different_title(self):
- item = _make_item('foo', 1)
+ item = _make_item("foo", 1)
info = _make_trackinfo()[0]
dist = match.track_distance(item, info, incl_artist=True)
self.assertNotEqual(dist, 0.0)
def test_different_artist(self):
- item = _make_item('one', 1)
- item.artist = 'foo'
+ item = _make_item("one", 1)
+ item.artist = "foo"
info = _make_trackinfo()[0]
dist = match.track_distance(item, info, incl_artist=True)
self.assertNotEqual(dist, 0.0)
def test_various_artists_tolerated(self):
- item = _make_item('one', 1)
- item.artist = 'Various Artists'
+ item = _make_item("one", 1)
+ item.artist = "Various Artists"
info = _make_trackinfo()[0]
dist = match.track_distance(item, info, incl_artist=True)
self.assertEqual(dist, 0.0)
@@ -341,26 +369,26 @@ def _dist(self, items, info):
def test_identical_albums(self):
items = []
- items.append(_make_item('one', 1))
- items.append(_make_item('two', 2))
- items.append(_make_item('three', 3))
+ items.append(_make_item("one", 1))
+ items.append(_make_item("two", 2))
+ items.append(_make_item("three", 3))
info = AlbumInfo(
- artist='some artist',
- album='some album',
+ artist="some artist",
+ album="some album",
tracks=_make_trackinfo(),
- va=False
+ va=False,
)
self.assertEqual(self._dist(items, info), 0)
def test_incomplete_album(self):
items = []
- items.append(_make_item('one', 1))
- items.append(_make_item('three', 3))
+ items.append(_make_item("one", 1))
+ items.append(_make_item("three", 3))
info = AlbumInfo(
- artist='some artist',
- album='some album',
+ artist="some artist",
+ album="some album",
tracks=_make_trackinfo(),
- va=False
+ va=False,
)
dist = self._dist(items, info)
self.assertNotEqual(dist, 0)
@@ -369,41 +397,41 @@ def test_incomplete_album(self):
def test_global_artists_differ(self):
items = []
- items.append(_make_item('one', 1))
- items.append(_make_item('two', 2))
- items.append(_make_item('three', 3))
+ items.append(_make_item("one", 1))
+ items.append(_make_item("two", 2))
+ items.append(_make_item("three", 3))
info = AlbumInfo(
- artist='someone else',
- album='some album',
+ artist="someone else",
+ album="some album",
tracks=_make_trackinfo(),
- va=False
+ va=False,
)
self.assertNotEqual(self._dist(items, info), 0)
def test_comp_track_artists_match(self):
items = []
- items.append(_make_item('one', 1))
- items.append(_make_item('two', 2))
- items.append(_make_item('three', 3))
+ items.append(_make_item("one", 1))
+ items.append(_make_item("two", 2))
+ items.append(_make_item("three", 3))
info = AlbumInfo(
- artist='should be ignored',
- album='some album',
+ artist="should be ignored",
+ album="some album",
tracks=_make_trackinfo(),
- va=True
+ va=True,
)
self.assertEqual(self._dist(items, info), 0)
def test_comp_no_track_artists(self):
# Some VA releases don't have track artists (incomplete metadata).
items = []
- items.append(_make_item('one', 1))
- items.append(_make_item('two', 2))
- items.append(_make_item('three', 3))
+ items.append(_make_item("one", 1))
+ items.append(_make_item("two", 2))
+ items.append(_make_item("three", 3))
info = AlbumInfo(
- artist='should be ignored',
- album='some album',
+ artist="should be ignored",
+ album="some album",
tracks=_make_trackinfo(),
- va=True
+ va=True,
)
info.tracks[0].artist = None
info.tracks[1].artist = None
@@ -412,41 +440,41 @@ def test_comp_no_track_artists(self):
def test_comp_track_artists_do_not_match(self):
items = []
- items.append(_make_item('one', 1))
- items.append(_make_item('two', 2, 'someone else'))
- items.append(_make_item('three', 3))
+ items.append(_make_item("one", 1))
+ items.append(_make_item("two", 2, "someone else"))
+ items.append(_make_item("three", 3))
info = AlbumInfo(
- artist='some artist',
- album='some album',
+ artist="some artist",
+ album="some album",
tracks=_make_trackinfo(),
- va=True
+ va=True,
)
self.assertNotEqual(self._dist(items, info), 0)
def test_tracks_out_of_order(self):
items = []
- items.append(_make_item('one', 1))
- items.append(_make_item('three', 2))
- items.append(_make_item('two', 3))
+ items.append(_make_item("one", 1))
+ items.append(_make_item("three", 2))
+ items.append(_make_item("two", 3))
info = AlbumInfo(
- artist='some artist',
- album='some album',
+ artist="some artist",
+ album="some album",
tracks=_make_trackinfo(),
- va=False
+ va=False,
)
dist = self._dist(items, info)
self.assertTrue(0 < dist < 0.2)
def test_two_medium_release(self):
items = []
- items.append(_make_item('one', 1))
- items.append(_make_item('two', 2))
- items.append(_make_item('three', 3))
+ items.append(_make_item("one", 1))
+ items.append(_make_item("two", 2))
+ items.append(_make_item("three", 3))
info = AlbumInfo(
- artist='some artist',
- album='some album',
+ artist="some artist",
+ album="some album",
tracks=_make_trackinfo(),
- va=False
+ va=False,
)
info.tracks[0].medium_index = 1
info.tracks[1].medium_index = 2
@@ -456,14 +484,14 @@ def test_two_medium_release(self):
def test_per_medium_track_numbers(self):
items = []
- items.append(_make_item('one', 1))
- items.append(_make_item('two', 2))
- items.append(_make_item('three', 1))
+ items.append(_make_item("one", 1))
+ items.append(_make_item("two", 2))
+ items.append(_make_item("three", 1))
info = AlbumInfo(
- artist='some artist',
- album='some album',
+ artist="some artist",
+ album="some album",
tracks=_make_trackinfo(),
- va=False
+ va=False,
)
info.tracks[0].medium_index = 1
info.tracks[1].medium_index = 2
@@ -475,93 +503,115 @@ def test_per_medium_track_numbers(self):
class AssignmentTest(unittest.TestCase):
def item(self, title, track):
return Item(
- title=title, track=track,
- mb_trackid='', mb_albumid='', mb_artistid='',
+ title=title,
+ track=track,
+ mb_trackid="",
+ mb_albumid="",
+ mb_artistid="",
)
def test_reorder_when_track_numbers_incorrect(self):
items = []
- items.append(self.item('one', 1))
- items.append(self.item('three', 2))
- items.append(self.item('two', 3))
+ items.append(self.item("one", 1))
+ items.append(self.item("three", 2))
+ items.append(self.item("two", 3))
trackinfo = []
- trackinfo.append(TrackInfo(title='one'))
- trackinfo.append(TrackInfo(title='two'))
- trackinfo.append(TrackInfo(title='three'))
- mapping, extra_items, extra_tracks = \
- match.assign_items(items, trackinfo)
+ trackinfo.append(TrackInfo(title="one"))
+ trackinfo.append(TrackInfo(title="two"))
+ trackinfo.append(TrackInfo(title="three"))
+ mapping, extra_items, extra_tracks = match.assign_items(
+ items, trackinfo
+ )
self.assertEqual(extra_items, [])
self.assertEqual(extra_tracks, [])
- self.assertEqual(mapping, {
- items[0]: trackinfo[0],
- items[1]: trackinfo[2],
- items[2]: trackinfo[1],
- })
+ self.assertEqual(
+ mapping,
+ {
+ items[0]: trackinfo[0],
+ items[1]: trackinfo[2],
+ items[2]: trackinfo[1],
+ },
+ )
def test_order_works_with_invalid_track_numbers(self):
items = []
- items.append(self.item('one', 1))
- items.append(self.item('three', 1))
- items.append(self.item('two', 1))
+ items.append(self.item("one", 1))
+ items.append(self.item("three", 1))
+ items.append(self.item("two", 1))
trackinfo = []
- trackinfo.append(TrackInfo(title='one'))
- trackinfo.append(TrackInfo(title='two'))
- trackinfo.append(TrackInfo(title='three'))
- mapping, extra_items, extra_tracks = \
- match.assign_items(items, trackinfo)
+ trackinfo.append(TrackInfo(title="one"))
+ trackinfo.append(TrackInfo(title="two"))
+ trackinfo.append(TrackInfo(title="three"))
+ mapping, extra_items, extra_tracks = match.assign_items(
+ items, trackinfo
+ )
self.assertEqual(extra_items, [])
self.assertEqual(extra_tracks, [])
- self.assertEqual(mapping, {
- items[0]: trackinfo[0],
- items[1]: trackinfo[2],
- items[2]: trackinfo[1],
- })
+ self.assertEqual(
+ mapping,
+ {
+ items[0]: trackinfo[0],
+ items[1]: trackinfo[2],
+ items[2]: trackinfo[1],
+ },
+ )
def test_order_works_with_missing_tracks(self):
items = []
- items.append(self.item('one', 1))
- items.append(self.item('three', 3))
+ items.append(self.item("one", 1))
+ items.append(self.item("three", 3))
trackinfo = []
- trackinfo.append(TrackInfo(title='one'))
- trackinfo.append(TrackInfo(title='two'))
- trackinfo.append(TrackInfo(title='three'))
- mapping, extra_items, extra_tracks = \
- match.assign_items(items, trackinfo)
+ trackinfo.append(TrackInfo(title="one"))
+ trackinfo.append(TrackInfo(title="two"))
+ trackinfo.append(TrackInfo(title="three"))
+ mapping, extra_items, extra_tracks = match.assign_items(
+ items, trackinfo
+ )
self.assertEqual(extra_items, [])
self.assertEqual(extra_tracks, [trackinfo[1]])
- self.assertEqual(mapping, {
- items[0]: trackinfo[0],
- items[1]: trackinfo[2],
- })
+ self.assertEqual(
+ mapping,
+ {
+ items[0]: trackinfo[0],
+ items[1]: trackinfo[2],
+ },
+ )
def test_order_works_with_extra_tracks(self):
items = []
- items.append(self.item('one', 1))
- items.append(self.item('two', 2))
- items.append(self.item('three', 3))
+ items.append(self.item("one", 1))
+ items.append(self.item("two", 2))
+ items.append(self.item("three", 3))
trackinfo = []
- trackinfo.append(TrackInfo(title='one'))
- trackinfo.append(TrackInfo(title='three'))
- mapping, extra_items, extra_tracks = \
- match.assign_items(items, trackinfo)
+ trackinfo.append(TrackInfo(title="one"))
+ trackinfo.append(TrackInfo(title="three"))
+ mapping, extra_items, extra_tracks = match.assign_items(
+ items, trackinfo
+ )
self.assertEqual(extra_items, [items[1]])
self.assertEqual(extra_tracks, [])
- self.assertEqual(mapping, {
- items[0]: trackinfo[0],
- items[2]: trackinfo[1],
- })
+ self.assertEqual(
+ mapping,
+ {
+ items[0]: trackinfo[0],
+ items[2]: trackinfo[1],
+ },
+ )
def test_order_works_when_track_names_are_entirely_wrong(self):
# A real-world test case contributed by a user.
def item(i, length):
return Item(
- artist='ben harper',
- album='burn to shine',
- title=f'ben harper - Burn to Shine {i}',
+ artist="ben harper",
+ album="burn to shine",
+ title=f"ben harper - Burn to Shine {i}",
track=i,
length=length,
- mb_trackid='', mb_albumid='', mb_artistid='',
+ mb_trackid="",
+ mb_albumid="",
+ mb_artistid="",
)
+
items = []
items.append(item(1, 241.37243007106997))
items.append(item(2, 342.27781704375036))
@@ -577,24 +627,25 @@ def item(i, length):
items.append(item(12, 186.45916150485752))
def info(index, title, length):
- return TrackInfo(title=title, length=length,
- index=index)
+ return TrackInfo(title=title, length=length, index=index)
+
trackinfo = []
- trackinfo.append(info(1, 'Alone', 238.893))
- trackinfo.append(info(2, 'The Woman in You', 341.44))
- trackinfo.append(info(3, 'Less', 245.59999999999999))
- trackinfo.append(info(4, 'Two Hands of a Prayer', 470.49299999999999))
- trackinfo.append(info(5, 'Please Bleed', 277.86599999999999))
- trackinfo.append(info(6, 'Suzie Blue', 269.30599999999998))
- trackinfo.append(info(7, 'Steal My Kisses', 245.36000000000001))
- trackinfo.append(info(8, 'Burn to Shine', 214.90600000000001))
- trackinfo.append(info(9, 'Show Me a Little Shame', 224.0929999999999))
- trackinfo.append(info(10, 'Forgiven', 317.19999999999999))
- trackinfo.append(info(11, 'Beloved One', 243.733))
- trackinfo.append(info(12, 'In the Lord\'s Arms', 186.13300000000001))
-
- mapping, extra_items, extra_tracks = \
- match.assign_items(items, trackinfo)
+ trackinfo.append(info(1, "Alone", 238.893))
+ trackinfo.append(info(2, "The Woman in You", 341.44))
+ trackinfo.append(info(3, "Less", 245.59999999999999))
+ trackinfo.append(info(4, "Two Hands of a Prayer", 470.49299999999999))
+ trackinfo.append(info(5, "Please Bleed", 277.86599999999999))
+ trackinfo.append(info(6, "Suzie Blue", 269.30599999999998))
+ trackinfo.append(info(7, "Steal My Kisses", 245.36000000000001))
+ trackinfo.append(info(8, "Burn to Shine", 214.90600000000001))
+ trackinfo.append(info(9, "Show Me a Little Shame", 224.0929999999999))
+ trackinfo.append(info(10, "Forgiven", 317.19999999999999))
+ trackinfo.append(info(11, "Beloved One", 243.733))
+ trackinfo.append(info(12, "In the Lord's Arms", 186.13300000000001))
+
+ mapping, extra_items, extra_tracks = match.assign_items(
+ items, trackinfo
+ )
self.assertEqual(extra_items, [])
self.assertEqual(extra_tracks, [])
for item, info in mapping.items():
@@ -607,8 +658,8 @@ def _apply(self, info=None, per_disc_numbering=False, artist_credit=False):
mapping = {}
for i, t in zip(self.items, info.tracks):
mapping[i] = t
- config['per_disc_numbering'] = per_disc_numbering
- config['artist_credit'] = artist_credit
+ config["per_disc_numbering"] = per_disc_numbering
+ config["artist_credit"] = artist_credit
autotag.apply_metadata(info, mapping)
@@ -620,64 +671,68 @@ def setUp(self):
self.items.append(Item({}))
self.items.append(Item({}))
trackinfo = []
- trackinfo.append(TrackInfo(
- title='oneNew',
- track_id='dfa939ec-118c-4d0f-84a0-60f3d1e6522c',
- medium=1,
- medium_index=1,
- medium_total=1,
- index=1,
- artist_credit='trackArtistCredit',
- artists_credit=['trackArtistCredit'],
- artist_sort='trackArtistSort',
- artists_sort=['trackArtistSort'],
- ))
- trackinfo.append(TrackInfo(
- title='twoNew',
- track_id='40130ed1-a27c-42fd-a328-1ebefb6caef4',
- medium=2,
- medium_index=1,
- index=2,
- medium_total=1,
- ))
+ trackinfo.append(
+ TrackInfo(
+ title="oneNew",
+ track_id="dfa939ec-118c-4d0f-84a0-60f3d1e6522c",
+ medium=1,
+ medium_index=1,
+ medium_total=1,
+ index=1,
+ artist_credit="trackArtistCredit",
+ artists_credit=["trackArtistCredit"],
+ artist_sort="trackArtistSort",
+ artists_sort=["trackArtistSort"],
+ )
+ )
+ trackinfo.append(
+ TrackInfo(
+ title="twoNew",
+ track_id="40130ed1-a27c-42fd-a328-1ebefb6caef4",
+ medium=2,
+ medium_index=1,
+ index=2,
+ medium_total=1,
+ )
+ )
self.info = AlbumInfo(
tracks=trackinfo,
- artist='artistNew',
- artists=['artistNew', 'artistNew2'],
- album='albumNew',
- album_id='7edb51cb-77d6-4416-a23c-3a8c2994a2c7',
- artist_id='a6623d39-2d8e-4f70-8242-0a9553b91e50',
+ artist="artistNew",
+ artists=["artistNew", "artistNew2"],
+ album="albumNew",
+ album_id="7edb51cb-77d6-4416-a23c-3a8c2994a2c7",
+ artist_id="a6623d39-2d8e-4f70-8242-0a9553b91e50",
artists_ids=[
- 'a6623d39-2d8e-4f70-8242-0a9553b91e50',
- 'a6623d39-2d8e-4f70-8242-0a9553b91e51'
+ "a6623d39-2d8e-4f70-8242-0a9553b91e50",
+ "a6623d39-2d8e-4f70-8242-0a9553b91e51",
],
- artist_credit='albumArtistCredit',
- artists_credit=['albumArtistCredit', 'albumArtistCredit2'],
- artist_sort='albumArtistSort',
- artists_sort=['albumArtistSort', 'albumArtistSort2'],
- albumtype='album',
+ artist_credit="albumArtistCredit",
+ artists_credit=["albumArtistCredit", "albumArtistCredit2"],
+ artist_sort="albumArtistSort",
+ artists_sort=["albumArtistSort", "albumArtistSort2"],
+ albumtype="album",
va=False,
mediums=2,
)
def test_titles_applied(self):
self._apply()
- self.assertEqual(self.items[0].title, 'oneNew')
- self.assertEqual(self.items[1].title, 'twoNew')
+ self.assertEqual(self.items[0].title, "oneNew")
+ self.assertEqual(self.items[1].title, "twoNew")
def test_album_and_artist_applied_to_all(self):
self._apply()
- self.assertEqual(self.items[0].album, 'albumNew')
- self.assertEqual(self.items[1].album, 'albumNew')
- self.assertEqual(self.items[0].artist, 'artistNew')
- self.assertEqual(self.items[1].artist, 'artistNew')
- self.assertEqual(self.items[0].artists, ['artistNew', 'artistNew2'])
- self.assertEqual(self.items[1].artists, ['artistNew', 'artistNew2'])
+ self.assertEqual(self.items[0].album, "albumNew")
+ self.assertEqual(self.items[1].album, "albumNew")
+ self.assertEqual(self.items[0].artist, "artistNew")
+ self.assertEqual(self.items[1].artist, "artistNew")
+ self.assertEqual(self.items[0].artists, ["artistNew", "artistNew2"])
+ self.assertEqual(self.items[1].artists, ["artistNew", "artistNew2"])
self.assertEqual(
- self.items[0].albumartists, ['artistNew', 'artistNew2']
+ self.items[0].albumartists, ["artistNew", "artistNew2"]
)
self.assertEqual(
- self.items[1].albumartists, ['artistNew', 'artistNew2']
+ self.items[1].albumartists, ["artistNew", "artistNew2"]
)
def test_track_index_applied(self):
@@ -712,115 +767,118 @@ def test_per_disc_numbering_track_total(self):
def test_artist_credit(self):
self._apply(artist_credit=True)
- self.assertEqual(self.items[0].artist, 'trackArtistCredit')
- self.assertEqual(self.items[1].artist, 'albumArtistCredit')
- self.assertEqual(self.items[0].albumartist, 'albumArtistCredit')
- self.assertEqual(self.items[1].albumartist, 'albumArtistCredit')
+ self.assertEqual(self.items[0].artist, "trackArtistCredit")
+ self.assertEqual(self.items[1].artist, "albumArtistCredit")
+ self.assertEqual(self.items[0].albumartist, "albumArtistCredit")
+ self.assertEqual(self.items[1].albumartist, "albumArtistCredit")
self.assertEqual(
self.items[0].albumartists,
- ['albumArtistCredit', 'albumArtistCredit2']
+ ["albumArtistCredit", "albumArtistCredit2"],
)
self.assertEqual(
self.items[1].albumartists,
- ['albumArtistCredit', 'albumArtistCredit2']
+ ["albumArtistCredit", "albumArtistCredit2"],
)
def test_artist_credit_prefers_artist_over_albumartist_credit(self):
- self.info.tracks[0].artist = 'oldArtist'
+ self.info.tracks[0].artist = "oldArtist"
self.info.tracks[0].artist_credit = None
self._apply(artist_credit=True)
- self.assertEqual(self.items[0].artist, 'oldArtist')
+ self.assertEqual(self.items[0].artist, "oldArtist")
def test_artist_credit_falls_back_to_albumartist(self):
self.info.artist_credit = None
self._apply(artist_credit=True)
- self.assertEqual(self.items[1].artist, 'artistNew')
+ self.assertEqual(self.items[1].artist, "artistNew")
def test_mb_trackid_applied(self):
self._apply()
- self.assertEqual(self.items[0].mb_trackid,
- 'dfa939ec-118c-4d0f-84a0-60f3d1e6522c')
- self.assertEqual(self.items[1].mb_trackid,
- '40130ed1-a27c-42fd-a328-1ebefb6caef4')
+ self.assertEqual(
+ self.items[0].mb_trackid, "dfa939ec-118c-4d0f-84a0-60f3d1e6522c"
+ )
+ self.assertEqual(
+ self.items[1].mb_trackid, "40130ed1-a27c-42fd-a328-1ebefb6caef4"
+ )
def test_mb_albumid_and_artistid_applied(self):
self._apply()
for item in self.items:
- self.assertEqual(item.mb_albumid,
- '7edb51cb-77d6-4416-a23c-3a8c2994a2c7')
- self.assertEqual(item.mb_artistid,
- 'a6623d39-2d8e-4f70-8242-0a9553b91e50')
+ self.assertEqual(
+ item.mb_albumid, "7edb51cb-77d6-4416-a23c-3a8c2994a2c7"
+ )
+ self.assertEqual(
+ item.mb_artistid, "a6623d39-2d8e-4f70-8242-0a9553b91e50"
+ )
self.assertEqual(
item.mb_artistids,
[
- 'a6623d39-2d8e-4f70-8242-0a9553b91e50',
- 'a6623d39-2d8e-4f70-8242-0a9553b91e51',
- ]
+ "a6623d39-2d8e-4f70-8242-0a9553b91e50",
+ "a6623d39-2d8e-4f70-8242-0a9553b91e51",
+ ],
)
def test_albumtype_applied(self):
self._apply()
- self.assertEqual(self.items[0].albumtype, 'album')
- self.assertEqual(self.items[1].albumtype, 'album')
+ self.assertEqual(self.items[0].albumtype, "album")
+ self.assertEqual(self.items[1].albumtype, "album")
def test_album_artist_overrides_empty_track_artist(self):
my_info = self.info.copy()
self._apply(info=my_info)
- self.assertEqual(self.items[0].artist, 'artistNew')
- self.assertEqual(self.items[1].artist, 'artistNew')
- self.assertEqual(self.items[0].artists, ['artistNew', 'artistNew2'])
- self.assertEqual(self.items[1].artists, ['artistNew', 'artistNew2'])
+ self.assertEqual(self.items[0].artist, "artistNew")
+ self.assertEqual(self.items[1].artist, "artistNew")
+ self.assertEqual(self.items[0].artists, ["artistNew", "artistNew2"])
+ self.assertEqual(self.items[1].artists, ["artistNew", "artistNew2"])
def test_album_artist_overridden_by_nonempty_track_artist(self):
my_info = self.info.copy()
- my_info.tracks[0].artist = 'artist1!'
- my_info.tracks[1].artist = 'artist2!'
- my_info.tracks[0].artists = ['artist1!', 'artist1!!']
- my_info.tracks[1].artists = ['artist2!', 'artist2!!']
+ my_info.tracks[0].artist = "artist1!"
+ my_info.tracks[1].artist = "artist2!"
+ my_info.tracks[0].artists = ["artist1!", "artist1!!"]
+ my_info.tracks[1].artists = ["artist2!", "artist2!!"]
self._apply(info=my_info)
- self.assertEqual(self.items[0].artist, 'artist1!')
- self.assertEqual(self.items[1].artist, 'artist2!')
- self.assertEqual(self.items[0].artists, ['artist1!', 'artist1!!'])
- self.assertEqual(self.items[1].artists, ['artist2!', 'artist2!!'])
+ self.assertEqual(self.items[0].artist, "artist1!")
+ self.assertEqual(self.items[1].artist, "artist2!")
+ self.assertEqual(self.items[0].artists, ["artist1!", "artist1!!"])
+ self.assertEqual(self.items[1].artists, ["artist2!", "artist2!!"])
def test_artist_credit_applied(self):
self._apply()
- self.assertEqual(self.items[0].albumartist_credit, 'albumArtistCredit')
+ self.assertEqual(self.items[0].albumartist_credit, "albumArtistCredit")
self.assertEqual(
self.items[0].albumartists_credit,
- ['albumArtistCredit', 'albumArtistCredit2']
+ ["albumArtistCredit", "albumArtistCredit2"],
)
- self.assertEqual(self.items[0].artist_credit, 'trackArtistCredit')
- self.assertEqual(self.items[0].artists_credit, ['trackArtistCredit'])
- self.assertEqual(self.items[1].albumartist_credit, 'albumArtistCredit')
+ self.assertEqual(self.items[0].artist_credit, "trackArtistCredit")
+ self.assertEqual(self.items[0].artists_credit, ["trackArtistCredit"])
+ self.assertEqual(self.items[1].albumartist_credit, "albumArtistCredit")
self.assertEqual(
self.items[1].albumartists_credit,
- ['albumArtistCredit', 'albumArtistCredit2']
+ ["albumArtistCredit", "albumArtistCredit2"],
)
- self.assertEqual(self.items[1].artist_credit, 'albumArtistCredit')
+ self.assertEqual(self.items[1].artist_credit, "albumArtistCredit")
self.assertEqual(
self.items[1].artists_credit,
- ['albumArtistCredit', 'albumArtistCredit2']
+ ["albumArtistCredit", "albumArtistCredit2"],
)
def test_artist_sort_applied(self):
self._apply()
- self.assertEqual(self.items[0].albumartist_sort, 'albumArtistSort')
+ self.assertEqual(self.items[0].albumartist_sort, "albumArtistSort")
self.assertEqual(
self.items[0].albumartists_sort,
- ['albumArtistSort', 'albumArtistSort2']
+ ["albumArtistSort", "albumArtistSort2"],
)
- self.assertEqual(self.items[0].artist_sort, 'trackArtistSort')
- self.assertEqual(self.items[0].artists_sort, ['trackArtistSort'])
- self.assertEqual(self.items[1].albumartist_sort, 'albumArtistSort')
+ self.assertEqual(self.items[0].artist_sort, "trackArtistSort")
+ self.assertEqual(self.items[0].artists_sort, ["trackArtistSort"])
+ self.assertEqual(self.items[1].albumartist_sort, "albumArtistSort")
self.assertEqual(
self.items[1].albumartists_sort,
- ['albumArtistSort', 'albumArtistSort2']
+ ["albumArtistSort", "albumArtistSort2"],
)
- self.assertEqual(self.items[1].artist_sort, 'albumArtistSort')
+ self.assertEqual(self.items[1].artist_sort, "albumArtistSort")
self.assertEqual(
- self.items[1].artists_sort,
- ['albumArtistSort', 'albumArtistSort2']
+ self.items[1].artists_sort, ["albumArtistSort", "albumArtistSort2"]
)
def test_full_date_applied(self):
@@ -860,10 +918,10 @@ def test_missing_date_applies_nothing(self):
def test_data_source_applied(self):
my_info = self.info.copy()
- my_info.data_source = 'MusicBrainz'
+ my_info.data_source = "MusicBrainz"
self._apply(info=my_info)
- self.assertEqual(self.items[0].data_source, 'MusicBrainz')
+ self.assertEqual(self.items[0].data_source, "MusicBrainz")
class ApplyCompilationTest(_common.TestCase, ApplyTestUtil):
@@ -874,46 +932,56 @@ def setUp(self):
self.items.append(Item({}))
self.items.append(Item({}))
trackinfo = []
- trackinfo.append(TrackInfo(
- title='oneNew',
- track_id='dfa939ec-118c-4d0f-84a0-60f3d1e6522c',
- artist='artistOneNew',
- artist_id='a05686fc-9db2-4c23-b99e-77f5db3e5282',
- index=1,
- ))
- trackinfo.append(TrackInfo(
- title='twoNew',
- track_id='40130ed1-a27c-42fd-a328-1ebefb6caef4',
- artist='artistTwoNew',
- artist_id='80b3cf5e-18fe-4c59-98c7-e5bb87210710',
- index=2,
- ))
+ trackinfo.append(
+ TrackInfo(
+ title="oneNew",
+ track_id="dfa939ec-118c-4d0f-84a0-60f3d1e6522c",
+ artist="artistOneNew",
+ artist_id="a05686fc-9db2-4c23-b99e-77f5db3e5282",
+ index=1,
+ )
+ )
+ trackinfo.append(
+ TrackInfo(
+ title="twoNew",
+ track_id="40130ed1-a27c-42fd-a328-1ebefb6caef4",
+ artist="artistTwoNew",
+ artist_id="80b3cf5e-18fe-4c59-98c7-e5bb87210710",
+ index=2,
+ )
+ )
self.info = AlbumInfo(
tracks=trackinfo,
- artist='variousNew',
- album='albumNew',
- album_id='3b69ea40-39b8-487f-8818-04b6eff8c21a',
- artist_id='89ad4ac3-39f7-470e-963a-56509c546377',
- albumtype='compilation',
+ artist="variousNew",
+ album="albumNew",
+ album_id="3b69ea40-39b8-487f-8818-04b6eff8c21a",
+ artist_id="89ad4ac3-39f7-470e-963a-56509c546377",
+ albumtype="compilation",
)
def test_album_and_track_artists_separate(self):
self._apply()
- self.assertEqual(self.items[0].artist, 'artistOneNew')
- self.assertEqual(self.items[1].artist, 'artistTwoNew')
- self.assertEqual(self.items[0].albumartist, 'variousNew')
- self.assertEqual(self.items[1].albumartist, 'variousNew')
+ self.assertEqual(self.items[0].artist, "artistOneNew")
+ self.assertEqual(self.items[1].artist, "artistTwoNew")
+ self.assertEqual(self.items[0].albumartist, "variousNew")
+ self.assertEqual(self.items[1].albumartist, "variousNew")
def test_mb_albumartistid_applied(self):
self._apply()
- self.assertEqual(self.items[0].mb_albumartistid,
- '89ad4ac3-39f7-470e-963a-56509c546377')
- self.assertEqual(self.items[1].mb_albumartistid,
- '89ad4ac3-39f7-470e-963a-56509c546377')
- self.assertEqual(self.items[0].mb_artistid,
- 'a05686fc-9db2-4c23-b99e-77f5db3e5282')
- self.assertEqual(self.items[1].mb_artistid,
- '80b3cf5e-18fe-4c59-98c7-e5bb87210710')
+ self.assertEqual(
+ self.items[0].mb_albumartistid,
+ "89ad4ac3-39f7-470e-963a-56509c546377",
+ )
+ self.assertEqual(
+ self.items[1].mb_albumartistid,
+ "89ad4ac3-39f7-470e-963a-56509c546377",
+ )
+ self.assertEqual(
+ self.items[0].mb_artistid, "a05686fc-9db2-4c23-b99e-77f5db3e5282"
+ )
+ self.assertEqual(
+ self.items[1].mb_artistid, "80b3cf5e-18fe-4c59-98c7-e5bb87210710"
+ )
def test_va_flag_cleared_does_not_set_comp(self):
self._apply()
@@ -930,77 +998,77 @@ def test_va_flag_sets_comp(self):
class StringDistanceTest(unittest.TestCase):
def test_equal_strings(self):
- dist = string_dist('Some String', 'Some String')
+ dist = string_dist("Some String", "Some String")
self.assertEqual(dist, 0.0)
def test_different_strings(self):
- dist = string_dist('Some String', 'Totally Different')
+ dist = string_dist("Some String", "Totally Different")
self.assertNotEqual(dist, 0.0)
def test_punctuation_ignored(self):
- dist = string_dist('Some String', 'Some.String!')
+ dist = string_dist("Some String", "Some.String!")
self.assertEqual(dist, 0.0)
def test_case_ignored(self):
- dist = string_dist('Some String', 'sOME sTring')
+ dist = string_dist("Some String", "sOME sTring")
self.assertEqual(dist, 0.0)
def test_leading_the_has_lower_weight(self):
- dist1 = string_dist('XXX Band Name', 'Band Name')
- dist2 = string_dist('The Band Name', 'Band Name')
+ dist1 = string_dist("XXX Band Name", "Band Name")
+ dist2 = string_dist("The Band Name", "Band Name")
self.assertTrue(dist2 < dist1)
def test_parens_have_lower_weight(self):
- dist1 = string_dist('One .Two.', 'One')
- dist2 = string_dist('One (Two)', 'One')
+ dist1 = string_dist("One .Two.", "One")
+ dist2 = string_dist("One (Two)", "One")
self.assertTrue(dist2 < dist1)
def test_brackets_have_lower_weight(self):
- dist1 = string_dist('One .Two.', 'One')
- dist2 = string_dist('One [Two]', 'One')
+ dist1 = string_dist("One .Two.", "One")
+ dist2 = string_dist("One [Two]", "One")
self.assertTrue(dist2 < dist1)
def test_ep_label_has_zero_weight(self):
- dist = string_dist('My Song (EP)', 'My Song')
+ dist = string_dist("My Song (EP)", "My Song")
self.assertEqual(dist, 0.0)
def test_featured_has_lower_weight(self):
- dist1 = string_dist('My Song blah Someone', 'My Song')
- dist2 = string_dist('My Song feat Someone', 'My Song')
+ dist1 = string_dist("My Song blah Someone", "My Song")
+ dist2 = string_dist("My Song feat Someone", "My Song")
self.assertTrue(dist2 < dist1)
def test_postfix_the(self):
- dist = string_dist('The Song Title', 'Song Title, The')
+ dist = string_dist("The Song Title", "Song Title, The")
self.assertEqual(dist, 0.0)
def test_postfix_a(self):
- dist = string_dist('A Song Title', 'Song Title, A')
+ dist = string_dist("A Song Title", "Song Title, A")
self.assertEqual(dist, 0.0)
def test_postfix_an(self):
- dist = string_dist('An Album Title', 'Album Title, An')
+ dist = string_dist("An Album Title", "Album Title, An")
self.assertEqual(dist, 0.0)
def test_empty_strings(self):
- dist = string_dist('', '')
+ dist = string_dist("", "")
self.assertEqual(dist, 0.0)
def test_solo_pattern(self):
# Just make sure these don't crash.
- string_dist('The ', '')
- string_dist('(EP)', '(EP)')
- string_dist(', An', '')
+ string_dist("The ", "")
+ string_dist("(EP)", "(EP)")
+ string_dist(", An", "")
def test_heuristic_does_not_harm_distance(self):
- dist = string_dist('Untitled', '[Untitled]')
+ dist = string_dist("Untitled", "[Untitled]")
self.assertEqual(dist, 0.0)
def test_ampersand_expansion(self):
- dist = string_dist('And', '&')
+ dist = string_dist("And", "&")
self.assertEqual(dist, 0.0)
def test_accented_characters(self):
- dist = string_dist('\xe9\xe1\xf1', 'ean')
+ dist = string_dist("\xe9\xe1\xf1", "ean")
self.assertEqual(dist, 0.0)
@@ -1008,8 +1076,11 @@ class EnumTest(_common.TestCase):
"""
Test Enum Subclasses defined in beets.util.enumeration
"""
+
def test_ordered_enum(self):
- OrderedEnumClass = match.OrderedEnum('OrderedEnumTest', ['a', 'b', 'c']) # noqa
+ OrderedEnumClass = match.OrderedEnum(
+ "OrderedEnumTest", ["a", "b", "c"]
+ ) # noqa
self.assertLess(OrderedEnumClass.a, OrderedEnumClass.b)
self.assertLess(OrderedEnumClass.a, OrderedEnumClass.c)
self.assertLess(OrderedEnumClass.b, OrderedEnumClass.c)
@@ -1021,5 +1092,6 @@ def test_ordered_enum(self):
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/test_config_command.py b/test/test_config_command.py
index 14e9df32b9..77fddee028 100644
--- a/test/test_config_command.py
+++ b/test/test_config_command.py
@@ -1,38 +1,36 @@
import os
-import yaml
-from unittest.mock import patch
-from tempfile import mkdtemp
-from shutil import rmtree
import unittest
+from shutil import rmtree
+from tempfile import mkdtemp
+from test.helper import TestHelper
+from unittest.mock import patch
-from beets import ui
-from beets import config
+import yaml
-from test.helper import TestHelper
+from beets import config, ui
from beets.library import Library
class ConfigCommandTest(unittest.TestCase, TestHelper):
-
def setUp(self):
- self.lib = Library(':memory:')
+ self.lib = Library(":memory:")
self.temp_dir = mkdtemp()
- if 'EDITOR' in os.environ:
- del os.environ['EDITOR']
+ if "EDITOR" in os.environ:
+ del os.environ["EDITOR"]
- os.environ['BEETSDIR'] = self.temp_dir
- self.config_path = os.path.join(self.temp_dir, 'config.yaml')
- with open(self.config_path, 'w') as file:
- file.write('library: lib\n')
- file.write('option: value\n')
- file.write('password: password_value')
+ os.environ["BEETSDIR"] = self.temp_dir
+ self.config_path = os.path.join(self.temp_dir, "config.yaml")
+ with open(self.config_path, "w") as file:
+ file.write("library: lib\n")
+ file.write("option: value\n")
+ file.write("password: password_value")
- self.cli_config_path = os.path.join(self.temp_dir, 'cli_config.yaml')
- with open(self.cli_config_path, 'w') as file:
- file.write('option: cli overwrite')
+ self.cli_config_path = os.path.join(self.temp_dir, "cli_config.yaml")
+ with open(self.cli_config_path, "w") as file:
+ file.write("option: cli overwrite")
config.clear()
- config['password'].redact = True
+ config["password"].redact = True
config._materialized = False
def tearDown(self):
@@ -43,92 +41,93 @@ def _run_with_yaml_output(self, *args):
return yaml.safe_load(output)
def test_show_user_config(self):
- output = self._run_with_yaml_output('config', '-c')
+ output = self._run_with_yaml_output("config", "-c")
- self.assertEqual(output['option'], 'value')
- self.assertEqual(output['password'], 'password_value')
+ self.assertEqual(output["option"], "value")
+ self.assertEqual(output["password"], "password_value")
def test_show_user_config_with_defaults(self):
- output = self._run_with_yaml_output('config', '-dc')
+ output = self._run_with_yaml_output("config", "-dc")
- self.assertEqual(output['option'], 'value')
- self.assertEqual(output['password'], 'password_value')
- self.assertEqual(output['library'], 'lib')
- self.assertEqual(output['import']['timid'], False)
+ self.assertEqual(output["option"], "value")
+ self.assertEqual(output["password"], "password_value")
+ self.assertEqual(output["library"], "lib")
+ self.assertEqual(output["import"]["timid"], False)
def test_show_user_config_with_cli(self):
- output = self._run_with_yaml_output('--config', self.cli_config_path,
- 'config')
+ output = self._run_with_yaml_output(
+ "--config", self.cli_config_path, "config"
+ )
- self.assertEqual(output['library'], 'lib')
- self.assertEqual(output['option'], 'cli overwrite')
+ self.assertEqual(output["library"], "lib")
+ self.assertEqual(output["option"], "cli overwrite")
def test_show_redacted_user_config(self):
- output = self._run_with_yaml_output('config')
+ output = self._run_with_yaml_output("config")
- self.assertEqual(output['option'], 'value')
- self.assertEqual(output['password'], 'REDACTED')
+ self.assertEqual(output["option"], "value")
+ self.assertEqual(output["password"], "REDACTED")
def test_show_redacted_user_config_with_defaults(self):
- output = self._run_with_yaml_output('config', '-d')
+ output = self._run_with_yaml_output("config", "-d")
- self.assertEqual(output['option'], 'value')
- self.assertEqual(output['password'], 'REDACTED')
- self.assertEqual(output['import']['timid'], False)
+ self.assertEqual(output["option"], "value")
+ self.assertEqual(output["password"], "REDACTED")
+ self.assertEqual(output["import"]["timid"], False)
def test_config_paths(self):
- output = self.run_with_output('config', '-p')
+ output = self.run_with_output("config", "-p")
- paths = output.split('\n')
+ paths = output.split("\n")
self.assertEqual(len(paths), 2)
self.assertEqual(paths[0], self.config_path)
def test_config_paths_with_cli(self):
- output = self.run_with_output('--config', self.cli_config_path,
- 'config', '-p')
- paths = output.split('\n')
+ output = self.run_with_output(
+ "--config", self.cli_config_path, "config", "-p"
+ )
+ paths = output.split("\n")
self.assertEqual(len(paths), 3)
self.assertEqual(paths[0], self.cli_config_path)
def test_edit_config_with_editor_env(self):
- os.environ['EDITOR'] = 'myeditor'
- with patch('os.execlp') as execlp:
- self.run_command('config', '-e')
- execlp.assert_called_once_with(
- 'myeditor', 'myeditor', self.config_path)
+ os.environ["EDITOR"] = "myeditor"
+ with patch("os.execlp") as execlp:
+ self.run_command("config", "-e")
+ execlp.assert_called_once_with("myeditor", "myeditor", self.config_path)
def test_edit_config_with_automatic_open(self):
- with patch('beets.util.open_anything') as open:
- open.return_value = 'please_open'
- with patch('os.execlp') as execlp:
- self.run_command('config', '-e')
+ with patch("beets.util.open_anything") as open:
+ open.return_value = "please_open"
+ with patch("os.execlp") as execlp:
+ self.run_command("config", "-e")
execlp.assert_called_once_with(
- 'please_open', 'please_open', self.config_path)
+ "please_open", "please_open", self.config_path
+ )
def test_config_editor_not_found(self):
with self.assertRaises(ui.UserError) as user_error:
- with patch('os.execlp') as execlp:
- execlp.side_effect = OSError('here is problem')
- self.run_command('config', '-e')
- self.assertIn('Could not edit configuration',
- str(user_error.exception))
- self.assertIn('here is problem', str(user_error.exception))
+ with patch("os.execlp") as execlp:
+ execlp.side_effect = OSError("here is problem")
+ self.run_command("config", "-e")
+ self.assertIn("Could not edit configuration", str(user_error.exception))
+ self.assertIn("here is problem", str(user_error.exception))
def test_edit_invalid_config_file(self):
- with open(self.config_path, 'w') as file:
- file.write('invalid: [')
+ with open(self.config_path, "w") as file:
+ file.write("invalid: [")
config.clear()
config._materialized = False
- os.environ['EDITOR'] = 'myeditor'
- with patch('os.execlp') as execlp:
- self.run_command('config', '-e')
- execlp.assert_called_once_with(
- 'myeditor', 'myeditor', self.config_path)
+ os.environ["EDITOR"] = "myeditor"
+ with patch("os.execlp") as execlp:
+ self.run_command("config", "-e")
+ execlp.assert_called_once_with("myeditor", "myeditor", self.config_path)
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/test_datequery.py b/test/test_datequery.py
index 7432cffc62..50066af66d 100644
--- a/test/test_datequery.py
+++ b/test/test_datequery.py
@@ -15,105 +15,126 @@
"""Test for dbcore's date-based queries.
"""
-from test import _common
-from datetime import datetime, timedelta
-import unittest
import time
-from beets.dbcore.query import _parse_periods, DateInterval, DateQuery, \
- InvalidQueryArgumentValueError
+import unittest
+from datetime import datetime, timedelta
+from test import _common
+
+from beets.dbcore.query import (
+ DateInterval,
+ DateQuery,
+ InvalidQueryArgumentValueError,
+ _parse_periods,
+)
def _date(string):
- return datetime.strptime(string, '%Y-%m-%dT%H:%M:%S')
+ return datetime.strptime(string, "%Y-%m-%dT%H:%M:%S")
def _datepattern(datetimedate):
- return datetimedate.strftime('%Y-%m-%dT%H:%M:%S')
+ return datetimedate.strftime("%Y-%m-%dT%H:%M:%S")
class DateIntervalTest(unittest.TestCase):
def test_year_precision_intervals(self):
- self.assertContains('2000..2001', '2000-01-01T00:00:00')
- self.assertContains('2000..2001', '2001-06-20T14:15:16')
- self.assertContains('2000..2001', '2001-12-31T23:59:59')
- self.assertExcludes('2000..2001', '1999-12-31T23:59:59')
- self.assertExcludes('2000..2001', '2002-01-01T00:00:00')
+ self.assertContains("2000..2001", "2000-01-01T00:00:00")
+ self.assertContains("2000..2001", "2001-06-20T14:15:16")
+ self.assertContains("2000..2001", "2001-12-31T23:59:59")
+ self.assertExcludes("2000..2001", "1999-12-31T23:59:59")
+ self.assertExcludes("2000..2001", "2002-01-01T00:00:00")
- self.assertContains('2000..', '2000-01-01T00:00:00')
- self.assertContains('2000..', '2099-10-11T00:00:00')
- self.assertExcludes('2000..', '1999-12-31T23:59:59')
+ self.assertContains("2000..", "2000-01-01T00:00:00")
+ self.assertContains("2000..", "2099-10-11T00:00:00")
+ self.assertExcludes("2000..", "1999-12-31T23:59:59")
- self.assertContains('..2001', '2001-12-31T23:59:59')
- self.assertExcludes('..2001', '2002-01-01T00:00:00')
+ self.assertContains("..2001", "2001-12-31T23:59:59")
+ self.assertExcludes("..2001", "2002-01-01T00:00:00")
- self.assertContains('-1d..1d', _datepattern(datetime.now()))
- self.assertExcludes('-2d..-1d', _datepattern(datetime.now()))
+ self.assertContains("-1d..1d", _datepattern(datetime.now()))
+ self.assertExcludes("-2d..-1d", _datepattern(datetime.now()))
def test_day_precision_intervals(self):
- self.assertContains('2000-06-20..2000-06-20', '2000-06-20T00:00:00')
- self.assertContains('2000-06-20..2000-06-20', '2000-06-20T10:20:30')
- self.assertContains('2000-06-20..2000-06-20', '2000-06-20T23:59:59')
- self.assertExcludes('2000-06-20..2000-06-20', '2000-06-19T23:59:59')
- self.assertExcludes('2000-06-20..2000-06-20', '2000-06-21T00:00:00')
+ self.assertContains("2000-06-20..2000-06-20", "2000-06-20T00:00:00")
+ self.assertContains("2000-06-20..2000-06-20", "2000-06-20T10:20:30")
+ self.assertContains("2000-06-20..2000-06-20", "2000-06-20T23:59:59")
+ self.assertExcludes("2000-06-20..2000-06-20", "2000-06-19T23:59:59")
+ self.assertExcludes("2000-06-20..2000-06-20", "2000-06-21T00:00:00")
def test_month_precision_intervals(self):
- self.assertContains('1999-12..2000-02', '1999-12-01T00:00:00')
- self.assertContains('1999-12..2000-02', '2000-02-15T05:06:07')
- self.assertContains('1999-12..2000-02', '2000-02-29T23:59:59')
- self.assertExcludes('1999-12..2000-02', '1999-11-30T23:59:59')
- self.assertExcludes('1999-12..2000-02', '2000-03-01T00:00:00')
+ self.assertContains("1999-12..2000-02", "1999-12-01T00:00:00")
+ self.assertContains("1999-12..2000-02", "2000-02-15T05:06:07")
+ self.assertContains("1999-12..2000-02", "2000-02-29T23:59:59")
+ self.assertExcludes("1999-12..2000-02", "1999-11-30T23:59:59")
+ self.assertExcludes("1999-12..2000-02", "2000-03-01T00:00:00")
def test_hour_precision_intervals(self):
# test with 'T' separator
- self.assertExcludes('2000-01-01T12..2000-01-01T13',
- '2000-01-01T11:59:59')
- self.assertContains('2000-01-01T12..2000-01-01T13',
- '2000-01-01T12:00:00')
- self.assertContains('2000-01-01T12..2000-01-01T13',
- '2000-01-01T12:30:00')
- self.assertContains('2000-01-01T12..2000-01-01T13',
- '2000-01-01T13:30:00')
- self.assertContains('2000-01-01T12..2000-01-01T13',
- '2000-01-01T13:59:59')
- self.assertExcludes('2000-01-01T12..2000-01-01T13',
- '2000-01-01T14:00:00')
- self.assertExcludes('2000-01-01T12..2000-01-01T13',
- '2000-01-01T14:30:00')
+ self.assertExcludes(
+ "2000-01-01T12..2000-01-01T13", "2000-01-01T11:59:59"
+ )
+ self.assertContains(
+ "2000-01-01T12..2000-01-01T13", "2000-01-01T12:00:00"
+ )
+ self.assertContains(
+ "2000-01-01T12..2000-01-01T13", "2000-01-01T12:30:00"
+ )
+ self.assertContains(
+ "2000-01-01T12..2000-01-01T13", "2000-01-01T13:30:00"
+ )
+ self.assertContains(
+ "2000-01-01T12..2000-01-01T13", "2000-01-01T13:59:59"
+ )
+ self.assertExcludes(
+ "2000-01-01T12..2000-01-01T13", "2000-01-01T14:00:00"
+ )
+ self.assertExcludes(
+ "2000-01-01T12..2000-01-01T13", "2000-01-01T14:30:00"
+ )
# test non-range query
- self.assertContains('2008-12-01T22',
- '2008-12-01T22:30:00')
- self.assertExcludes('2008-12-01T22',
- '2008-12-01T23:30:00')
+ self.assertContains("2008-12-01T22", "2008-12-01T22:30:00")
+ self.assertExcludes("2008-12-01T22", "2008-12-01T23:30:00")
def test_minute_precision_intervals(self):
- self.assertExcludes('2000-01-01T12:30..2000-01-01T12:31',
- '2000-01-01T12:29:59')
- self.assertContains('2000-01-01T12:30..2000-01-01T12:31',
- '2000-01-01T12:30:00')
- self.assertContains('2000-01-01T12:30..2000-01-01T12:31',
- '2000-01-01T12:30:30')
- self.assertContains('2000-01-01T12:30..2000-01-01T12:31',
- '2000-01-01T12:31:59')
- self.assertExcludes('2000-01-01T12:30..2000-01-01T12:31',
- '2000-01-01T12:32:00')
+ self.assertExcludes(
+ "2000-01-01T12:30..2000-01-01T12:31", "2000-01-01T12:29:59"
+ )
+ self.assertContains(
+ "2000-01-01T12:30..2000-01-01T12:31", "2000-01-01T12:30:00"
+ )
+ self.assertContains(
+ "2000-01-01T12:30..2000-01-01T12:31", "2000-01-01T12:30:30"
+ )
+ self.assertContains(
+ "2000-01-01T12:30..2000-01-01T12:31", "2000-01-01T12:31:59"
+ )
+ self.assertExcludes(
+ "2000-01-01T12:30..2000-01-01T12:31", "2000-01-01T12:32:00"
+ )
def test_second_precision_intervals(self):
- self.assertExcludes('2000-01-01T12:30:50..2000-01-01T12:30:55',
- '2000-01-01T12:30:49')
- self.assertContains('2000-01-01T12:30:50..2000-01-01T12:30:55',
- '2000-01-01T12:30:50')
- self.assertContains('2000-01-01T12:30:50..2000-01-01T12:30:55',
- '2000-01-01T12:30:55')
- self.assertExcludes('2000-01-01T12:30:50..2000-01-01T12:30:55',
- '2000-01-01T12:30:56')
+ self.assertExcludes(
+ "2000-01-01T12:30:50..2000-01-01T12:30:55", "2000-01-01T12:30:49"
+ )
+ self.assertContains(
+ "2000-01-01T12:30:50..2000-01-01T12:30:55", "2000-01-01T12:30:50"
+ )
+ self.assertContains(
+ "2000-01-01T12:30:50..2000-01-01T12:30:55", "2000-01-01T12:30:55"
+ )
+ self.assertExcludes(
+ "2000-01-01T12:30:50..2000-01-01T12:30:55", "2000-01-01T12:30:56"
+ )
def test_unbounded_endpoints(self):
- self.assertContains('..', date=datetime.max)
- self.assertContains('..', date=datetime.min)
- self.assertContains('..', '1000-01-01T00:00:00')
+ self.assertContains("..", date=datetime.max)
+ self.assertContains("..", date=datetime.min)
+ self.assertContains("..", "1000-01-01T00:00:00")
- def assertContains(self, interval_pattern, date_pattern=None, date=None): # noqa
+ def assertContains(
+ self, interval_pattern, date_pattern=None, date=None
+ ): # noqa
if date is None:
date = _date(date_pattern)
(start, end) = _parse_periods(interval_pattern)
@@ -128,40 +149,40 @@ def assertExcludes(self, interval_pattern, date_pattern): # noqa
def _parsetime(s):
- return time.mktime(datetime.strptime(s, '%Y-%m-%d %H:%M').timetuple())
+ return time.mktime(datetime.strptime(s, "%Y-%m-%d %H:%M").timetuple())
class DateQueryTest(_common.LibTestCase):
def setUp(self):
super().setUp()
- self.i.added = _parsetime('2013-03-30 22:21')
+ self.i.added = _parsetime("2013-03-30 22:21")
self.i.store()
def test_single_month_match_fast(self):
- query = DateQuery('added', '2013-03')
+ query = DateQuery("added", "2013-03")
matched = self.lib.items(query)
self.assertEqual(len(matched), 1)
def test_single_month_nonmatch_fast(self):
- query = DateQuery('added', '2013-04')
+ query = DateQuery("added", "2013-04")
matched = self.lib.items(query)
self.assertEqual(len(matched), 0)
def test_single_month_match_slow(self):
- query = DateQuery('added', '2013-03')
+ query = DateQuery("added", "2013-03")
self.assertTrue(query.match(self.i))
def test_single_month_nonmatch_slow(self):
- query = DateQuery('added', '2013-04')
+ query = DateQuery("added", "2013-04")
self.assertFalse(query.match(self.i))
def test_single_day_match_fast(self):
- query = DateQuery('added', '2013-03-30')
+ query = DateQuery("added", "2013-03-30")
matched = self.lib.items(query)
self.assertEqual(len(matched), 1)
def test_single_day_nonmatch_fast(self):
- query = DateQuery('added', '2013-03-31')
+ query = DateQuery("added", "2013-03-31")
matched = self.lib.items(query)
self.assertEqual(len(matched), 0)
@@ -174,37 +195,40 @@ def setUp(self):
# zone bugs.
self._now = datetime(2017, 12, 31, 22, 55, 4, 101332)
- self.i.added = _parsetime(self._now.strftime('%Y-%m-%d %H:%M'))
+ self.i.added = _parsetime(self._now.strftime("%Y-%m-%d %H:%M"))
self.i.store()
def test_single_month_match_fast(self):
- query = DateQuery('added', self._now.strftime('%Y-%m'))
+ query = DateQuery("added", self._now.strftime("%Y-%m"))
matched = self.lib.items(query)
self.assertEqual(len(matched), 1)
def test_single_month_nonmatch_fast(self):
- query = DateQuery('added', (self._now + timedelta(days=30))
- .strftime('%Y-%m'))
+ query = DateQuery(
+ "added", (self._now + timedelta(days=30)).strftime("%Y-%m")
+ )
matched = self.lib.items(query)
self.assertEqual(len(matched), 0)
def test_single_month_match_slow(self):
- query = DateQuery('added', self._now.strftime('%Y-%m'))
+ query = DateQuery("added", self._now.strftime("%Y-%m"))
self.assertTrue(query.match(self.i))
def test_single_month_nonmatch_slow(self):
- query = DateQuery('added', (self._now + timedelta(days=30))
- .strftime('%Y-%m'))
+ query = DateQuery(
+ "added", (self._now + timedelta(days=30)).strftime("%Y-%m")
+ )
self.assertFalse(query.match(self.i))
def test_single_day_match_fast(self):
- query = DateQuery('added', self._now.strftime('%Y-%m-%d'))
+ query = DateQuery("added", self._now.strftime("%Y-%m-%d"))
matched = self.lib.items(query)
self.assertEqual(len(matched), 1)
def test_single_day_nonmatch_fast(self):
- query = DateQuery('added', (self._now + timedelta(days=1))
- .strftime('%Y-%m-%d'))
+ query = DateQuery(
+ "added", (self._now + timedelta(days=1)).strftime("%Y-%m-%d")
+ )
matched = self.lib.items(query)
self.assertEqual(len(matched), 0)
@@ -212,42 +236,42 @@ def test_single_day_nonmatch_fast(self):
class DateQueryTestRelativeMore(_common.LibTestCase):
def setUp(self):
super().setUp()
- self.i.added = _parsetime(datetime.now().strftime('%Y-%m-%d %H:%M'))
+ self.i.added = _parsetime(datetime.now().strftime("%Y-%m-%d %H:%M"))
self.i.store()
def test_relative(self):
- for timespan in ['d', 'w', 'm', 'y']:
- query = DateQuery('added', '-4' + timespan + '..+4' + timespan)
+ for timespan in ["d", "w", "m", "y"]:
+ query = DateQuery("added", "-4" + timespan + "..+4" + timespan)
matched = self.lib.items(query)
self.assertEqual(len(matched), 1)
def test_relative_fail(self):
- for timespan in ['d', 'w', 'm', 'y']:
- query = DateQuery('added', '-2' + timespan + '..-1' + timespan)
+ for timespan in ["d", "w", "m", "y"]:
+ query = DateQuery("added", "-2" + timespan + "..-1" + timespan)
matched = self.lib.items(query)
self.assertEqual(len(matched), 0)
def test_start_relative(self):
- for timespan in ['d', 'w', 'm', 'y']:
- query = DateQuery('added', '-4' + timespan + '..')
+ for timespan in ["d", "w", "m", "y"]:
+ query = DateQuery("added", "-4" + timespan + "..")
matched = self.lib.items(query)
self.assertEqual(len(matched), 1)
def test_start_relative_fail(self):
- for timespan in ['d', 'w', 'm', 'y']:
- query = DateQuery('added', '4' + timespan + '..')
+ for timespan in ["d", "w", "m", "y"]:
+ query = DateQuery("added", "4" + timespan + "..")
matched = self.lib.items(query)
self.assertEqual(len(matched), 0)
def test_end_relative(self):
- for timespan in ['d', 'w', 'm', 'y']:
- query = DateQuery('added', '..+4' + timespan)
+ for timespan in ["d", "w", "m", "y"]:
+ query = DateQuery("added", "..+4" + timespan)
matched = self.lib.items(query)
self.assertEqual(len(matched), 1)
def test_end_relative_fail(self):
- for timespan in ['d', 'w', 'm', 'y']:
- query = DateQuery('added', '..-4' + timespan)
+ for timespan in ["d", "w", "m", "y"]:
+ query = DateQuery("added", "..-4" + timespan)
matched = self.lib.items(query)
self.assertEqual(len(matched), 0)
@@ -255,50 +279,50 @@ def test_end_relative_fail(self):
class DateQueryConstructTest(unittest.TestCase):
def test_long_numbers(self):
with self.assertRaises(InvalidQueryArgumentValueError):
- DateQuery('added', '1409830085..1412422089')
+ DateQuery("added", "1409830085..1412422089")
def test_too_many_components(self):
with self.assertRaises(InvalidQueryArgumentValueError):
- DateQuery('added', '12-34-56-78')
+ DateQuery("added", "12-34-56-78")
def test_invalid_date_query(self):
q_list = [
- '2001-01-0a',
- '2001-0a',
- '200a',
- '2001-01-01..2001-01-0a',
- '2001-0a..2001-01',
- '200a..2002',
- '20aa..',
- '..2aa'
+ "2001-01-0a",
+ "2001-0a",
+ "200a",
+ "2001-01-01..2001-01-0a",
+ "2001-0a..2001-01",
+ "200a..2002",
+ "20aa..",
+ "..2aa",
]
for q in q_list:
with self.assertRaises(InvalidQueryArgumentValueError):
- DateQuery('added', q)
+ DateQuery("added", q)
def test_datetime_uppercase_t_separator(self):
- date_query = DateQuery('added', '2000-01-01T12')
+ date_query = DateQuery("added", "2000-01-01T12")
self.assertEqual(date_query.interval.start, datetime(2000, 1, 1, 12))
self.assertEqual(date_query.interval.end, datetime(2000, 1, 1, 13))
def test_datetime_lowercase_t_separator(self):
- date_query = DateQuery('added', '2000-01-01t12')
+ date_query = DateQuery("added", "2000-01-01t12")
self.assertEqual(date_query.interval.start, datetime(2000, 1, 1, 12))
self.assertEqual(date_query.interval.end, datetime(2000, 1, 1, 13))
def test_datetime_space_separator(self):
- date_query = DateQuery('added', '2000-01-01 12')
+ date_query = DateQuery("added", "2000-01-01 12")
self.assertEqual(date_query.interval.start, datetime(2000, 1, 1, 12))
self.assertEqual(date_query.interval.end, datetime(2000, 1, 1, 13))
def test_datetime_invalid_separator(self):
with self.assertRaises(InvalidQueryArgumentValueError):
- DateQuery('added', '2000-01-01x12')
+ DateQuery("added", "2000-01-01x12")
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/test_dbcore.py b/test/test_dbcore.py
index 980ebd1378..541222be28 100644
--- a/test/test_dbcore.py
+++ b/test/test_dbcore.py
@@ -19,15 +19,15 @@
import shutil
import sqlite3
import unittest
-
-from test import _common
-from beets import dbcore
from tempfile import mkstemp
+from test import _common
+from beets import dbcore
# Fixture: concrete database and model classes. For migration tests, we
# have multiple models with different numbers of fields.
+
class SortFixture(dbcore.query.FieldSort):
pass
@@ -44,21 +44,21 @@ def match(self):
class ModelFixture1(dbcore.Model):
- _table = 'test'
- _flex_table = 'testflex'
+ _table = "test"
+ _flex_table = "testflex"
_fields = {
- 'id': dbcore.types.PRIMARY_ID,
- 'field_one': dbcore.types.INTEGER,
- 'field_two': dbcore.types.STRING,
+ "id": dbcore.types.PRIMARY_ID,
+ "field_one": dbcore.types.INTEGER,
+ "field_two": dbcore.types.STRING,
}
_types = {
- 'some_float_field': dbcore.types.FLOAT,
+ "some_float_field": dbcore.types.FLOAT,
}
_sorts = {
- 'some_sort': SortFixture,
+ "some_sort": SortFixture,
}
_queries = {
- 'some_query': QueryFixture,
+ "some_query": QueryFixture,
}
@classmethod
@@ -76,9 +76,9 @@ class DatabaseFixture1(dbcore.Database):
class ModelFixture2(ModelFixture1):
_fields = {
- 'id': dbcore.types.PRIMARY_ID,
- 'field_one': dbcore.types.INTEGER,
- 'field_two': dbcore.types.INTEGER,
+ "id": dbcore.types.PRIMARY_ID,
+ "field_one": dbcore.types.INTEGER,
+ "field_two": dbcore.types.INTEGER,
}
@@ -89,10 +89,10 @@ class DatabaseFixture2(dbcore.Database):
class ModelFixture3(ModelFixture1):
_fields = {
- 'id': dbcore.types.PRIMARY_ID,
- 'field_one': dbcore.types.INTEGER,
- 'field_two': dbcore.types.INTEGER,
- 'field_three': dbcore.types.INTEGER,
+ "id": dbcore.types.PRIMARY_ID,
+ "field_one": dbcore.types.INTEGER,
+ "field_two": dbcore.types.INTEGER,
+ "field_three": dbcore.types.INTEGER,
}
@@ -103,11 +103,11 @@ class DatabaseFixture3(dbcore.Database):
class ModelFixture4(ModelFixture1):
_fields = {
- 'id': dbcore.types.PRIMARY_ID,
- 'field_one': dbcore.types.INTEGER,
- 'field_two': dbcore.types.INTEGER,
- 'field_three': dbcore.types.INTEGER,
- 'field_four': dbcore.types.INTEGER,
+ "id": dbcore.types.PRIMARY_ID,
+ "field_one": dbcore.types.INTEGER,
+ "field_two": dbcore.types.INTEGER,
+ "field_three": dbcore.types.INTEGER,
+ "field_four": dbcore.types.INTEGER,
}
@@ -117,19 +117,19 @@ class DatabaseFixture4(dbcore.Database):
class AnotherModelFixture(ModelFixture1):
- _table = 'another'
- _flex_table = 'anotherflex'
+ _table = "another"
+ _flex_table = "anotherflex"
_fields = {
- 'id': dbcore.types.PRIMARY_ID,
- 'foo': dbcore.types.INTEGER,
+ "id": dbcore.types.PRIMARY_ID,
+ "foo": dbcore.types.INTEGER,
}
class ModelFixture5(ModelFixture1):
_fields = {
- 'some_string_field': dbcore.types.STRING,
- 'some_float_field': dbcore.types.FLOAT,
- 'some_boolean_field': dbcore.types.BOOLEAN,
+ "some_string_field": dbcore.types.STRING,
+ "some_float_field": dbcore.types.FLOAT,
+ "some_boolean_field": dbcore.types.BOOLEAN,
}
@@ -144,10 +144,9 @@ class DatabaseFixtureTwoModels(dbcore.Database):
class ModelFixtureWithGetters(dbcore.Model):
-
@classmethod
def _getters(cls):
- return {'aComputedField': (lambda s: 'thing')}
+ return {"aComputedField": (lambda s: "thing")}
def _template_funcs(self):
return {}
@@ -161,14 +160,14 @@ class MigrationTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
- handle, cls.orig_libfile = mkstemp('orig_db')
+ handle, cls.orig_libfile = mkstemp("orig_db")
os.close(handle)
# Set up a database with the two-field schema.
old_lib = DatabaseFixture2(cls.orig_libfile)
# Add an item to the old library.
old_lib._connection().execute(
- 'insert into test (field_one, field_two) values (4, 2)'
+ "insert into test (field_one, field_two) values (4, 2)"
)
old_lib._connection().commit()
old_lib._connection().close()
@@ -179,7 +178,7 @@ def tearDownClass(cls):
os.remove(cls.orig_libfile)
def setUp(self):
- handle, self.libfile = mkstemp('db')
+ handle, self.libfile = mkstemp("db")
os.close(handle)
shutil.copyfile(self.orig_libfile, self.libfile)
@@ -230,7 +229,7 @@ def test_extra_model_adds_table(self):
class TransactionTest(unittest.TestCase):
def setUp(self):
- self.db = DatabaseFixture1(':memory:')
+ self.db = DatabaseFixture1(":memory:")
def tearDown(self):
self.db._connection().close()
@@ -239,9 +238,9 @@ def test_mutate_increase_revision(self):
old_rev = self.db.revision
with self.db.transaction() as tx:
tx.mutate(
- 'INSERT INTO {} '
- '(field_one) '
- 'VALUES (?);'.format(ModelFixture1._table),
+ "INSERT INTO {} "
+ "(field_one) "
+ "VALUES (?);".format(ModelFixture1._table),
(111,),
)
self.assertGreater(self.db.revision, old_rev)
@@ -249,13 +248,13 @@ def test_mutate_increase_revision(self):
def test_query_no_increase_revision(self):
old_rev = self.db.revision
with self.db.transaction() as tx:
- tx.query('PRAGMA table_info(%s)' % ModelFixture1._table)
+ tx.query("PRAGMA table_info(%s)" % ModelFixture1._table)
self.assertEqual(self.db.revision, old_rev)
class ModelTest(unittest.TestCase):
def setUp(self):
- self.db = DatabaseFixture1(':memory:')
+ self.db = DatabaseFixture1(":memory:")
def tearDown(self):
self.db._connection().close()
@@ -263,7 +262,7 @@ def tearDown(self):
def test_add_model(self):
model = ModelFixture1()
model.add(self.db)
- rows = self.db._connection().execute('select * from test').fetchall()
+ rows = self.db._connection().execute("select * from test").fetchall()
self.assertEqual(len(rows), 1)
def test_store_fixed_field(self):
@@ -271,8 +270,8 @@ def test_store_fixed_field(self):
model.add(self.db)
model.field_one = 123
model.store()
- row = self.db._connection().execute('select * from test').fetchone()
- self.assertEqual(row['field_one'], 123)
+ row = self.db._connection().execute("select * from test").fetchone()
+ self.assertEqual(row["field_one"], 123)
def test_revision(self):
old_rev = self.db.revision
@@ -307,47 +306,47 @@ def test_retrieve_by_id(self):
def test_store_and_retrieve_flexattr(self):
model = ModelFixture1()
model.add(self.db)
- model.foo = 'bar'
+ model.foo = "bar"
model.store()
other_model = self.db._get(ModelFixture1, model.id)
- self.assertEqual(other_model.foo, 'bar')
+ self.assertEqual(other_model.foo, "bar")
def test_delete_flexattr(self):
model = ModelFixture1()
- model['foo'] = 'bar'
- self.assertTrue('foo' in model)
- del model['foo']
- self.assertFalse('foo' in model)
+ model["foo"] = "bar"
+ self.assertTrue("foo" in model)
+ del model["foo"]
+ self.assertFalse("foo" in model)
def test_delete_flexattr_via_dot(self):
model = ModelFixture1()
- model['foo'] = 'bar'
- self.assertTrue('foo' in model)
+ model["foo"] = "bar"
+ self.assertTrue("foo" in model)
del model.foo
- self.assertFalse('foo' in model)
+ self.assertFalse("foo" in model)
def test_delete_flexattr_persists(self):
model = ModelFixture1()
model.add(self.db)
- model.foo = 'bar'
+ model.foo = "bar"
model.store()
model = self.db._get(ModelFixture1, model.id)
- del model['foo']
+ del model["foo"]
model.store()
model = self.db._get(ModelFixture1, model.id)
- self.assertFalse('foo' in model)
+ self.assertFalse("foo" in model)
def test_delete_non_existent_attribute(self):
model = ModelFixture1()
with self.assertRaises(KeyError):
- del model['foo']
+ del model["foo"]
def test_delete_fixed_attribute(self):
model = ModelFixture5()
- model.some_string_field = 'foo'
+ model.some_string_field = "foo"
model.some_float_field = 1.23
model.some_boolean_field = True
@@ -375,22 +374,22 @@ def test_normalization_for_typed_flex_fields(self):
def test_load_deleted_flex_field(self):
model1 = ModelFixture1()
- model1['flex_field'] = True
+ model1["flex_field"] = True
model1.add(self.db)
model2 = self.db._get(ModelFixture1, model1.id)
- self.assertIn('flex_field', model2)
+ self.assertIn("flex_field", model2)
- del model1['flex_field']
+ del model1["flex_field"]
model1.store()
model2.load()
- self.assertNotIn('flex_field', model2)
+ self.assertNotIn("flex_field", model2)
def test_check_db_fails(self):
- with self.assertRaisesRegex(ValueError, 'no database'):
+ with self.assertRaisesRegex(ValueError, "no database"):
dbcore.Model()._check_db()
- with self.assertRaisesRegex(ValueError, 'no id'):
+ with self.assertRaisesRegex(ValueError, "no id"):
ModelFixture1(self.db)._check_db()
dbcore.Model(self.db)._check_db(need_id=False)
@@ -401,15 +400,16 @@ def test_missing_field(self):
def test_computed_field(self):
model = ModelFixtureWithGetters()
- self.assertEqual(model.aComputedField, 'thing')
- with self.assertRaisesRegex(KeyError, 'computed field .+ deleted'):
+ self.assertEqual(model.aComputedField, "thing")
+ with self.assertRaisesRegex(KeyError, "computed field .+ deleted"):
del model.aComputedField
def test_items(self):
model = ModelFixture1(self.db)
model.id = 5
- self.assertEqual({('id', 5), ('field_one', 0), ('field_two', '')},
- set(model.items()))
+ self.assertEqual(
+ {("id", 5), ("field_one", 0), ("field_two", "")}, set(model.items())
+ )
def test_delete_internal_field(self):
model = dbcore.Model()
@@ -426,50 +426,49 @@ class FormatTest(unittest.TestCase):
def test_format_fixed_field_integer(self):
model = ModelFixture1()
model.field_one = 155
- value = model.formatted().get('field_one')
- self.assertEqual(value, '155')
+ value = model.formatted().get("field_one")
+ self.assertEqual(value, "155")
def test_format_fixed_field_integer_normalized(self):
- """The normalize method of the Integer class rounds floats
- """
+ """The normalize method of the Integer class rounds floats"""
model = ModelFixture1()
model.field_one = 142.432
- value = model.formatted().get('field_one')
- self.assertEqual(value, '142')
+ value = model.formatted().get("field_one")
+ self.assertEqual(value, "142")
model.field_one = 142.863
- value = model.formatted().get('field_one')
- self.assertEqual(value, '143')
+ value = model.formatted().get("field_one")
+ self.assertEqual(value, "143")
def test_format_fixed_field_string(self):
model = ModelFixture1()
- model.field_two = 'caf\xe9'
- value = model.formatted().get('field_two')
- self.assertEqual(value, 'caf\xe9')
+ model.field_two = "caf\xe9"
+ value = model.formatted().get("field_two")
+ self.assertEqual(value, "caf\xe9")
def test_format_flex_field(self):
model = ModelFixture1()
- model.other_field = 'caf\xe9'
- value = model.formatted().get('other_field')
- self.assertEqual(value, 'caf\xe9')
+ model.other_field = "caf\xe9"
+ value = model.formatted().get("other_field")
+ self.assertEqual(value, "caf\xe9")
def test_format_flex_field_bytes(self):
model = ModelFixture1()
- model.other_field = 'caf\xe9'.encode()
- value = model.formatted().get('other_field')
+ model.other_field = "caf\xe9".encode()
+ value = model.formatted().get("other_field")
self.assertTrue(isinstance(value, str))
- self.assertEqual(value, 'caf\xe9')
+ self.assertEqual(value, "caf\xe9")
def test_format_unset_field(self):
model = ModelFixture1()
- value = model.formatted().get('other_field')
- self.assertEqual(value, '')
+ value = model.formatted().get("other_field")
+ self.assertEqual(value, "")
def test_format_typed_flex_field(self):
model = ModelFixture1()
model.some_float_field = 3.14159265358979
- value = model.formatted().get('some_float_field')
- self.assertEqual(value, '3.1')
+ value = model.formatted().get("some_float_field")
+ self.assertEqual(value, "3.1")
class FormattedMappingTest(unittest.TestCase):
@@ -482,91 +481,93 @@ def test_get_unset_field(self):
model = ModelFixture1()
formatted = model.formatted()
with self.assertRaises(KeyError):
- formatted['other_field']
+ formatted["other_field"]
def test_get_method_with_default(self):
model = ModelFixture1()
formatted = model.formatted()
- self.assertEqual(formatted.get('other_field'), '')
+ self.assertEqual(formatted.get("other_field"), "")
def test_get_method_with_specified_default(self):
model = ModelFixture1()
formatted = model.formatted()
- self.assertEqual(formatted.get('other_field', 'default'), 'default')
+ self.assertEqual(formatted.get("other_field", "default"), "default")
class ParseTest(unittest.TestCase):
def test_parse_fixed_field(self):
- value = ModelFixture1._parse('field_one', '2')
+ value = ModelFixture1._parse("field_one", "2")
self.assertIsInstance(value, int)
self.assertEqual(value, 2)
def test_parse_flex_field(self):
- value = ModelFixture1._parse('some_float_field', '2')
+ value = ModelFixture1._parse("some_float_field", "2")
self.assertIsInstance(value, float)
self.assertEqual(value, 2.0)
def test_parse_untyped_field(self):
- value = ModelFixture1._parse('field_nine', '2')
- self.assertEqual(value, '2')
+ value = ModelFixture1._parse("field_nine", "2")
+ self.assertEqual(value, "2")
class QueryParseTest(unittest.TestCase):
def pqp(self, part):
return dbcore.queryparse.parse_query_part(
part,
- {'year': dbcore.query.NumericQuery},
- {':': dbcore.query.RegexpQuery},
- )[:-1] # remove the negate flag
+ {"year": dbcore.query.NumericQuery},
+ {":": dbcore.query.RegexpQuery},
+ )[
+ :-1
+ ] # remove the negate flag
def test_one_basic_term(self):
- q = 'test'
- r = (None, 'test', dbcore.query.SubstringQuery)
+ q = "test"
+ r = (None, "test", dbcore.query.SubstringQuery)
self.assertEqual(self.pqp(q), r)
def test_one_keyed_term(self):
- q = 'test:val'
- r = ('test', 'val', dbcore.query.SubstringQuery)
+ q = "test:val"
+ r = ("test", "val", dbcore.query.SubstringQuery)
self.assertEqual(self.pqp(q), r)
def test_colon_at_end(self):
- q = 'test:'
- r = ('test', '', dbcore.query.SubstringQuery)
+ q = "test:"
+ r = ("test", "", dbcore.query.SubstringQuery)
self.assertEqual(self.pqp(q), r)
def test_one_basic_regexp(self):
- q = r':regexp'
- r = (None, 'regexp', dbcore.query.RegexpQuery)
+ q = r":regexp"
+ r = (None, "regexp", dbcore.query.RegexpQuery)
self.assertEqual(self.pqp(q), r)
def test_keyed_regexp(self):
- q = r'test::regexp'
- r = ('test', 'regexp', dbcore.query.RegexpQuery)
+ q = r"test::regexp"
+ r = ("test", "regexp", dbcore.query.RegexpQuery)
self.assertEqual(self.pqp(q), r)
def test_escaped_colon(self):
- q = r'test\:val'
- r = (None, 'test:val', dbcore.query.SubstringQuery)
+ q = r"test\:val"
+ r = (None, "test:val", dbcore.query.SubstringQuery)
self.assertEqual(self.pqp(q), r)
def test_escaped_colon_in_regexp(self):
- q = r':test\:regexp'
- r = (None, 'test:regexp', dbcore.query.RegexpQuery)
+ q = r":test\:regexp"
+ r = (None, "test:regexp", dbcore.query.RegexpQuery)
self.assertEqual(self.pqp(q), r)
def test_single_year(self):
- q = 'year:1999'
- r = ('year', '1999', dbcore.query.NumericQuery)
+ q = "year:1999"
+ r = ("year", "1999", dbcore.query.NumericQuery)
self.assertEqual(self.pqp(q), r)
def test_multiple_years(self):
- q = 'year:1999..2010'
- r = ('year', '1999..2010', dbcore.query.NumericQuery)
+ q = "year:1999..2010"
+ r = ("year", "1999..2010", dbcore.query.NumericQuery)
self.assertEqual(self.pqp(q), r)
def test_empty_query_part(self):
- q = ''
- r = (None, '', dbcore.query.SubstringQuery)
+ q = ""
+ r = (None, "", dbcore.query.SubstringQuery)
self.assertEqual(self.pqp(q), r)
@@ -575,7 +576,7 @@ def qfs(self, strings):
return dbcore.queryparse.query_from_strings(
dbcore.query.AndQuery,
ModelFixture1,
- {':': dbcore.query.RegexpQuery},
+ {":": dbcore.query.RegexpQuery},
strings,
)
@@ -586,26 +587,26 @@ def test_zero_parts(self):
self.assertIsInstance(q.subqueries[0], dbcore.query.TrueQuery)
def test_two_parts(self):
- q = self.qfs(['foo', 'bar:baz'])
+ q = self.qfs(["foo", "bar:baz"])
self.assertIsInstance(q, dbcore.query.AndQuery)
self.assertEqual(len(q.subqueries), 2)
self.assertIsInstance(q.subqueries[0], dbcore.query.AnyFieldQuery)
self.assertIsInstance(q.subqueries[1], dbcore.query.SubstringQuery)
def test_parse_fixed_type_query(self):
- q = self.qfs(['field_one:2..3'])
+ q = self.qfs(["field_one:2..3"])
self.assertIsInstance(q.subqueries[0], dbcore.query.NumericQuery)
def test_parse_flex_type_query(self):
- q = self.qfs(['some_float_field:2..3'])
+ q = self.qfs(["some_float_field:2..3"])
self.assertIsInstance(q.subqueries[0], dbcore.query.NumericQuery)
def test_empty_query_part(self):
- q = self.qfs([''])
+ q = self.qfs([""])
self.assertIsInstance(q.subqueries[0], dbcore.query.TrueQuery)
def test_parse_named_query(self):
- q = self.qfs(['some_query:foo'])
+ q = self.qfs(["some_query:foo"])
self.assertIsInstance(q.subqueries[0], QueryFixture)
@@ -622,26 +623,26 @@ def test_zero_parts(self):
self.assertEqual(s, dbcore.query.NullSort())
def test_one_parts(self):
- s = self.sfs(['field+'])
+ s = self.sfs(["field+"])
self.assertIsInstance(s, dbcore.query.Sort)
def test_two_parts(self):
- s = self.sfs(['field+', 'another_field-'])
+ s = self.sfs(["field+", "another_field-"])
self.assertIsInstance(s, dbcore.query.MultipleSort)
self.assertEqual(len(s.sorts), 2)
def test_fixed_field_sort(self):
- s = self.sfs(['field_one+'])
+ s = self.sfs(["field_one+"])
self.assertIsInstance(s, dbcore.query.FixedFieldSort)
- self.assertEqual(s, dbcore.query.FixedFieldSort('field_one'))
+ self.assertEqual(s, dbcore.query.FixedFieldSort("field_one"))
def test_flex_field_sort(self):
- s = self.sfs(['flex_field+'])
+ s = self.sfs(["flex_field+"])
self.assertIsInstance(s, dbcore.query.SlowFieldSort)
- self.assertEqual(s, dbcore.query.SlowFieldSort('flex_field'))
+ self.assertEqual(s, dbcore.query.SlowFieldSort("flex_field"))
def test_special_sort(self):
- s = self.sfs(['some_sort+'])
+ s = self.sfs(["some_sort+"])
self.assertIsInstance(s, SortFixture)
@@ -653,43 +654,43 @@ def psq(self, parts):
)
def test_and_query(self):
- q, s = self.psq('foo bar')
+ q, s = self.psq("foo bar")
self.assertIsInstance(q, dbcore.query.AndQuery)
self.assertIsInstance(s, dbcore.query.NullSort)
self.assertEqual(len(q.subqueries), 2)
def test_or_query(self):
- q, s = self.psq('foo , bar')
+ q, s = self.psq("foo , bar")
self.assertIsInstance(q, dbcore.query.OrQuery)
self.assertIsInstance(s, dbcore.query.NullSort)
self.assertEqual(len(q.subqueries), 2)
def test_no_space_before_comma_or_query(self):
- q, s = self.psq('foo, bar')
+ q, s = self.psq("foo, bar")
self.assertIsInstance(q, dbcore.query.OrQuery)
self.assertIsInstance(s, dbcore.query.NullSort)
self.assertEqual(len(q.subqueries), 2)
def test_no_spaces_or_query(self):
- q, s = self.psq('foo,bar')
+ q, s = self.psq("foo,bar")
self.assertIsInstance(q, dbcore.query.AndQuery)
self.assertIsInstance(s, dbcore.query.NullSort)
self.assertEqual(len(q.subqueries), 1)
def test_trailing_comma_or_query(self):
- q, s = self.psq('foo , bar ,')
+ q, s = self.psq("foo , bar ,")
self.assertIsInstance(q, dbcore.query.OrQuery)
self.assertIsInstance(s, dbcore.query.NullSort)
self.assertEqual(len(q.subqueries), 3)
def test_leading_comma_or_query(self):
- q, s = self.psq(', foo , bar')
+ q, s = self.psq(", foo , bar")
self.assertIsInstance(q, dbcore.query.OrQuery)
self.assertIsInstance(s, dbcore.query.NullSort)
self.assertEqual(len(q.subqueries), 3)
def test_only_direction(self):
- q, s = self.psq('-')
+ q, s = self.psq("-")
self.assertIsInstance(q, dbcore.query.AndQuery)
self.assertIsInstance(s, dbcore.query.NullSort)
self.assertEqual(len(q.subqueries), 1)
@@ -697,12 +698,12 @@ def test_only_direction(self):
class ResultsIteratorTest(unittest.TestCase):
def setUp(self):
- self.db = DatabaseFixture1(':memory:')
+ self.db = DatabaseFixture1(":memory:")
model = ModelFixture1()
- model['foo'] = 'baz'
+ model["foo"] = "baz"
model.add(self.db)
model = ModelFixture1()
- model['foo'] = 'bar'
+ model["foo"] = "bar"
model.add(self.db)
def tearDown(self):
@@ -726,32 +727,32 @@ def test_concurrent_iterators(self):
self.assertEqual(len(list(it1)), 1)
def test_slow_query(self):
- q = dbcore.query.SubstringQuery('foo', 'ba', False)
+ q = dbcore.query.SubstringQuery("foo", "ba", False)
objs = self.db._fetch(ModelFixture1, q)
self.assertEqual(len(list(objs)), 2)
def test_slow_query_negative(self):
- q = dbcore.query.SubstringQuery('foo', 'qux', False)
+ q = dbcore.query.SubstringQuery("foo", "qux", False)
objs = self.db._fetch(ModelFixture1, q)
self.assertEqual(len(list(objs)), 0)
def test_iterate_slow_sort(self):
- s = dbcore.query.SlowFieldSort('foo')
+ s = dbcore.query.SlowFieldSort("foo")
res = self.db._fetch(ModelFixture1, sort=s)
objs = list(res)
- self.assertEqual(objs[0].foo, 'bar')
- self.assertEqual(objs[1].foo, 'baz')
+ self.assertEqual(objs[0].foo, "bar")
+ self.assertEqual(objs[1].foo, "baz")
def test_unsorted_subscript(self):
objs = self.db._fetch(ModelFixture1)
- self.assertEqual(objs[0].foo, 'baz')
- self.assertEqual(objs[1].foo, 'bar')
+ self.assertEqual(objs[0].foo, "baz")
+ self.assertEqual(objs[1].foo, "bar")
def test_slow_sort_subscript(self):
- s = dbcore.query.SlowFieldSort('foo')
+ s = dbcore.query.SlowFieldSort("foo")
objs = self.db._fetch(ModelFixture1, sort=s)
- self.assertEqual(objs[0].foo, 'bar')
- self.assertEqual(objs[1].foo, 'baz')
+ self.assertEqual(objs[0].foo, "bar")
+ self.assertEqual(objs[1].foo, "baz")
def test_length(self):
objs = self.db._fetch(ModelFixture1)
@@ -763,13 +764,14 @@ def test_out_of_range(self):
objs[100]
def test_no_results(self):
- self.assertIsNone(self.db._fetch(
- ModelFixture1, dbcore.query.FalseQuery()).get())
+ self.assertIsNone(
+ self.db._fetch(ModelFixture1, dbcore.query.FalseQuery()).get()
+ )
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/test_files.py b/test/test_files.py
index c96791d680..d57c857475 100644
--- a/test/test_files.py
+++ b/test/test_files.py
@@ -15,14 +15,14 @@
"""Test file manipulation functionality of Item.
"""
-import shutil
import os
+import shutil
import stat
-from os.path import join
import unittest
-
+from os.path import join
from test import _common
from test._common import item, touch
+
import beets.library
from beets import util
from beets.util import MoveOperation, bytestring_path, syspath
@@ -33,29 +33,30 @@ def setUp(self):
super().setUp()
# make a temporary file
- self.path = join(self.temp_dir, b'temp.mp3')
+ self.path = join(self.temp_dir, b"temp.mp3")
shutil.copy(
- syspath(join(_common.RSRC, b'full.mp3')),
+ syspath(join(_common.RSRC, b"full.mp3")),
syspath(self.path),
)
# add it to a temporary library
- self.lib = beets.library.Library(':memory:')
+ self.lib = beets.library.Library(":memory:")
self.i = beets.library.Item.from_path(self.path)
self.lib.add(self.i)
# set up the destination
- self.libdir = join(self.temp_dir, b'testlibdir')
+ self.libdir = join(self.temp_dir, b"testlibdir")
os.mkdir(syspath(self.libdir))
self.lib.directory = self.libdir
- self.lib.path_formats = [('default',
- join('$artist', '$album', '$title'))]
- self.i.artist = 'one'
- self.i.album = 'two'
- self.i.title = 'three'
- self.dest = join(self.libdir, b'one', b'two', b'three.mp3')
+ self.lib.path_formats = [
+ ("default", join("$artist", "$album", "$title"))
+ ]
+ self.i.artist = "one"
+ self.i.album = "two"
+ self.i.title = "three"
+ self.dest = join(self.libdir, b"one", b"two", b"three.mp3")
- self.otherdir = join(self.temp_dir, b'testotherdir')
+ self.otherdir = join(self.temp_dir, b"testotherdir")
def test_move_arrives(self):
self.i.move()
@@ -63,7 +64,7 @@ def test_move_arrives(self):
def test_move_to_custom_dir(self):
self.i.move(basedir=self.otherdir)
- self.assertExists(join(self.otherdir, b'one', b'two', b'three.mp3'))
+ self.assertExists(join(self.otherdir, b"one", b"two", b"three.mp3"))
def test_move_departs(self):
self.i.move()
@@ -74,7 +75,7 @@ def test_move_in_lib_prunes_empty_dir(self):
old_path = self.i.path
self.assertExists(old_path)
- self.i.artist = 'newArtist'
+ self.i.artist = "newArtist"
self.i.move()
self.assertNotExists(old_path)
self.assertNotExists(os.path.dirname(old_path))
@@ -122,22 +123,22 @@ def test_move_already_at_destination(self):
self.assertEqual(self.i.path, old_path)
def test_move_file_with_colon(self):
- self.i.artist = 'C:DOS'
+ self.i.artist = "C:DOS"
self.i.move()
- self.assertIn('C_DOS', self.i.path.decode())
+ self.assertIn("C_DOS", self.i.path.decode())
def test_move_file_with_multiple_colons(self):
- print(beets.config['replace'])
- self.i.artist = 'COM:DOS'
+ print(beets.config["replace"])
+ self.i.artist = "COM:DOS"
self.i.move()
- self.assertIn('COM_DOS', self.i.path.decode())
+ self.assertIn("COM_DOS", self.i.path.decode())
def test_move_file_with_colon_alt_separator(self):
- old = beets.config['drive_sep_replace']
- beets.config["drive_sep_replace"] = '0'
- self.i.artist = 'C:DOS'
+ old = beets.config["drive_sep_replace"]
+ beets.config["drive_sep_replace"] = "0"
+ self.i.artist = "C:DOS"
self.i.move()
- self.assertIn('C0DOS', self.i.path.decode())
+ self.assertIn("C0DOS", self.i.path.decode())
beets.config["drive_sep_replace"] = old
def test_read_only_file_copied_writable(self):
@@ -160,8 +161,7 @@ def test_move_avoids_collision_with_existing_file(self):
self.i.move()
self.assertNotEqual(self.i.path, dest)
- self.assertEqual(os.path.dirname(self.i.path),
- os.path.dirname(dest))
+ self.assertEqual(os.path.dirname(self.i.path), os.path.dirname(dest))
@unittest.skipUnless(_common.HAVE_SYMLINK, "need symlinks")
def test_link_arrives(self):
@@ -190,8 +190,8 @@ def test_hardlink_arrives(self):
s1 = os.stat(syspath(self.path))
s2 = os.stat(syspath(self.dest))
self.assertTrue(
- (s1[stat.ST_INO], s1[stat.ST_DEV]) ==
- (s2[stat.ST_INO], s2[stat.ST_DEV])
+ (s1[stat.ST_INO], s1[stat.ST_DEV])
+ == (s2[stat.ST_INO], s2[stat.ST_DEV])
)
@unittest.skipUnless(_common.HAVE_HARDLINK, "need hardlinks")
@@ -207,38 +207,38 @@ def test_hardlink_changes_path(self):
class HelperTest(_common.TestCase):
def test_ancestry_works_on_file(self):
- p = '/a/b/c'
- a = ['/', '/a', '/a/b']
+ p = "/a/b/c"
+ a = ["/", "/a", "/a/b"]
self.assertEqual(util.ancestry(p), a)
def test_ancestry_works_on_dir(self):
- p = '/a/b/c/'
- a = ['/', '/a', '/a/b', '/a/b/c']
+ p = "/a/b/c/"
+ a = ["/", "/a", "/a/b", "/a/b/c"]
self.assertEqual(util.ancestry(p), a)
def test_ancestry_works_on_relative(self):
- p = 'a/b/c'
- a = ['a', 'a/b']
+ p = "a/b/c"
+ a = ["a", "a/b"]
self.assertEqual(util.ancestry(p), a)
def test_components_works_on_file(self):
- p = '/a/b/c'
- a = ['/', 'a', 'b', 'c']
+ p = "/a/b/c"
+ a = ["/", "a", "b", "c"]
self.assertEqual(util.components(p), a)
def test_components_works_on_dir(self):
- p = '/a/b/c/'
- a = ['/', 'a', 'b', 'c']
+ p = "/a/b/c/"
+ a = ["/", "a", "b", "c"]
self.assertEqual(util.components(p), a)
def test_components_works_on_relative(self):
- p = 'a/b/c'
- a = ['a', 'b', 'c']
+ p = "a/b/c"
+ a = ["a", "b", "c"]
self.assertEqual(util.components(p), a)
def test_forward_slash(self):
- p = br'C:\a\b\c'
- a = br'C:/a/b/c'
+ p = rb"C:\a\b\c"
+ a = rb"C:/a/b/c"
self.assertEqual(util.path_as_posix(p), a)
@@ -247,10 +247,11 @@ def setUp(self):
super().setUp()
# Make library and item.
- self.lib = beets.library.Library(':memory:')
- self.lib.path_formats = \
- [('default', join('$albumartist', '$album', '$title'))]
- self.libdir = os.path.join(self.temp_dir, b'testlibdir')
+ self.lib = beets.library.Library(":memory:")
+ self.lib.path_formats = [
+ ("default", join("$albumartist", "$album", "$title"))
+ ]
+ self.libdir = os.path.join(self.temp_dir, b"testlibdir")
self.lib.directory = self.libdir
self.i = item(self.lib)
# Make a file for the item.
@@ -260,19 +261,19 @@ def setUp(self):
# Make an album.
self.ai = self.lib.add_album((self.i,))
# Alternate destination dir.
- self.otherdir = os.path.join(self.temp_dir, b'testotherdir')
+ self.otherdir = os.path.join(self.temp_dir, b"testotherdir")
def test_albuminfo_move_changes_paths(self):
- self.ai.album = 'newAlbumName'
+ self.ai.album = "newAlbumName"
self.ai.move()
self.ai.store()
self.i.load()
- self.assertTrue(b'newAlbumName' in self.i.path)
+ self.assertTrue(b"newAlbumName" in self.i.path)
def test_albuminfo_move_moves_file(self):
oldpath = self.i.path
- self.ai.album = 'newAlbumName'
+ self.ai.album = "newAlbumName"
self.ai.move()
self.ai.store()
self.i.load()
@@ -282,7 +283,7 @@ def test_albuminfo_move_moves_file(self):
def test_albuminfo_move_copies_file(self):
oldpath = self.i.path
- self.ai.album = 'newAlbumName'
+ self.ai.album = "newAlbumName"
self.ai.move(operation=MoveOperation.COPY)
self.ai.store()
self.i.load()
@@ -293,7 +294,7 @@ def test_albuminfo_move_copies_file(self):
@unittest.skipUnless(_common.HAVE_REFLINK, "need reflink")
def test_albuminfo_move_reflinks_file(self):
oldpath = self.i.path
- self.ai.album = 'newAlbumName'
+ self.ai.album = "newAlbumName"
self.ai.move(operation=MoveOperation.REFLINK)
self.ai.store()
self.i.load()
@@ -305,7 +306,7 @@ def test_albuminfo_move_to_custom_dir(self):
self.ai.move(basedir=self.otherdir)
self.i.load()
self.ai.store()
- self.assertTrue(b'testotherdir' in self.i.path)
+ self.assertTrue(b"testotherdir" in self.i.path)
class ArtFileTest(_common.TestCase):
@@ -313,8 +314,8 @@ def setUp(self):
super().setUp()
# Make library and item.
- self.lib = beets.library.Library(':memory:')
- self.libdir = os.path.join(self.temp_dir, b'testlibdir')
+ self.lib = beets.library.Library(":memory:")
+ self.libdir = os.path.join(self.temp_dir, b"testlibdir")
self.lib.directory = self.libdir
self.i = item(self.lib)
self.i.path = self.i.destination()
@@ -324,12 +325,12 @@ def setUp(self):
# Make an album.
self.ai = self.lib.add_album((self.i,))
# Make an art file too.
- self.art = self.lib.get_album(self.i).art_destination('something.jpg')
+ self.art = self.lib.get_album(self.i).art_destination("something.jpg")
touch(self.art)
self.ai.artpath = self.art
self.ai.store()
# Alternate destination dir.
- self.otherdir = os.path.join(self.temp_dir, b'testotherdir')
+ self.otherdir = os.path.join(self.temp_dir, b"testotherdir")
def test_art_deleted_when_items_deleted(self):
self.assertExists(self.art)
@@ -339,7 +340,7 @@ def test_art_deleted_when_items_deleted(self):
def test_art_moves_with_album(self):
self.assertExists(self.art)
oldpath = self.i.path
- self.ai.album = 'newAlbum'
+ self.ai.album = "newAlbum"
self.ai.move()
self.i.load()
@@ -358,16 +359,16 @@ def test_art_moves_with_album_to_custom_dir(self):
self.assertNotExists(self.art)
newart = self.lib.get_album(self.i).artpath
self.assertExists(newart)
- self.assertTrue(b'testotherdir' in newart)
+ self.assertTrue(b"testotherdir" in newart)
def test_setart_copies_image(self):
util.remove(self.art)
- newart = os.path.join(self.libdir, b'newart.jpg')
+ newart = os.path.join(self.libdir, b"newart.jpg")
touch(newart)
i2 = item()
i2.path = self.i.path
- i2.artist = 'someArtist'
+ i2.artist = "someArtist"
ai = self.lib.add_album((i2,))
i2.move(operation=MoveOperation.COPY)
@@ -379,11 +380,11 @@ def test_setart_to_existing_art_works(self):
util.remove(self.art)
# Original art.
- newart = os.path.join(self.libdir, b'newart.jpg')
+ newart = os.path.join(self.libdir, b"newart.jpg")
touch(newart)
i2 = item()
i2.path = self.i.path
- i2.artist = 'someArtist'
+ i2.artist = "someArtist"
ai = self.lib.add_album((i2,))
i2.move(operation=MoveOperation.COPY)
ai.set_art(newart)
@@ -393,11 +394,11 @@ def test_setart_to_existing_art_works(self):
self.assertExists(ai.artpath)
def test_setart_to_existing_but_unset_art_works(self):
- newart = os.path.join(self.libdir, b'newart.jpg')
+ newart = os.path.join(self.libdir, b"newart.jpg")
touch(newart)
i2 = item()
i2.path = self.i.path
- i2.artist = 'someArtist'
+ i2.artist = "someArtist"
ai = self.lib.add_album((i2,))
i2.move(operation=MoveOperation.COPY)
@@ -410,11 +411,11 @@ def test_setart_to_existing_but_unset_art_works(self):
self.assertExists(ai.artpath)
def test_setart_to_conflicting_file_gets_new_path(self):
- newart = os.path.join(self.libdir, b'newart.jpg')
+ newart = os.path.join(self.libdir, b"newart.jpg")
touch(newart)
i2 = item()
i2.path = self.i.path
- i2.artist = 'someArtist'
+ i2.artist = "someArtist"
ai = self.lib.add_album((i2,))
i2.move(operation=MoveOperation.COPY)
@@ -425,20 +426,19 @@ def test_setart_to_conflicting_file_gets_new_path(self):
# Set the art.
ai.set_art(newart)
self.assertNotEqual(artdest, ai.artpath)
- self.assertEqual(os.path.dirname(artdest),
- os.path.dirname(ai.artpath))
+ self.assertEqual(os.path.dirname(artdest), os.path.dirname(ai.artpath))
def test_setart_sets_permissions(self):
util.remove(self.art)
- newart = os.path.join(self.libdir, b'newart.jpg')
+ newart = os.path.join(self.libdir, b"newart.jpg")
touch(newart)
os.chmod(syspath(newart), 0o400) # read-only
try:
i2 = item()
i2.path = self.i.path
- i2.artist = 'someArtist'
+ i2.artist = "someArtist"
ai = self.lib.add_album((i2,))
i2.move(operation=MoveOperation.COPY)
ai.set_art(newart)
@@ -456,12 +456,12 @@ def test_move_last_file_moves_albumart(self):
oldartpath = self.lib.albums()[0].artpath
self.assertExists(oldartpath)
- self.ai.album = 'different_album'
+ self.ai.album = "different_album"
self.ai.store()
self.ai.items()[0].move()
artpath = self.lib.albums()[0].artpath
- self.assertTrue(b'different_album' in artpath)
+ self.assertTrue(b"different_album" in artpath)
self.assertExists(artpath)
self.assertNotExists(oldartpath)
@@ -473,12 +473,12 @@ def test_move_not_last_file_does_not_move_albumart(self):
oldartpath = self.lib.albums()[0].artpath
self.assertExists(oldartpath)
- self.i.album = 'different_album'
+ self.i.album = "different_album"
self.i.album_id = None # detach from album
self.i.move()
artpath = self.lib.albums()[0].artpath
- self.assertFalse(b'different_album' in artpath)
+ self.assertFalse(b"different_album" in artpath)
self.assertEqual(artpath, oldartpath)
self.assertExists(oldartpath)
@@ -488,8 +488,8 @@ def setUp(self):
super().setUp()
# Make library and item.
- self.lib = beets.library.Library(':memory:')
- self.libdir = os.path.join(self.temp_dir, b'testlibdir')
+ self.lib = beets.library.Library(":memory:")
+ self.libdir = os.path.join(self.temp_dir, b"testlibdir")
self.lib.directory = self.libdir
self.i = item(self.lib)
self.i.path = self.i.destination()
@@ -507,13 +507,13 @@ def test_removing_last_item_prunes_empty_dir(self):
def test_removing_last_item_preserves_nonempty_dir(self):
parent = os.path.dirname(self.i.path)
- touch(os.path.join(parent, b'dummy.txt'))
+ touch(os.path.join(parent, b"dummy.txt"))
self.i.remove(True)
self.assertExists(parent)
def test_removing_last_item_prunes_dir_with_blacklisted_file(self):
parent = os.path.dirname(self.i.path)
- touch(os.path.join(parent, b'.DS_Store'))
+ touch(os.path.join(parent, b".DS_Store"))
self.i.remove(True)
self.assertNotExists(parent)
@@ -527,13 +527,13 @@ def test_removing_last_item_preserves_library_dir(self):
self.assertExists(self.libdir)
def test_removing_item_outside_of_library_deletes_nothing(self):
- self.lib.directory = os.path.join(self.temp_dir, b'xxx')
+ self.lib.directory = os.path.join(self.temp_dir, b"xxx")
parent = os.path.dirname(self.i.path)
self.i.remove(True)
self.assertExists(parent)
def test_removing_last_item_in_album_with_albumart_prunes_dir(self):
- artfile = os.path.join(self.temp_dir, b'testart.jpg')
+ artfile = os.path.join(self.temp_dir, b"testart.jpg")
touch(artfile)
self.ai.set_art(artfile)
self.ai.store()
@@ -548,7 +548,7 @@ class SoftRemoveTest(_common.TestCase):
def setUp(self):
super().setUp()
- self.path = os.path.join(self.temp_dir, b'testfile')
+ self.path = os.path.join(self.temp_dir, b"testfile")
touch(self.path)
def test_soft_remove_deletes_file(self):
@@ -557,20 +557,20 @@ def test_soft_remove_deletes_file(self):
def test_soft_remove_silent_on_no_file(self):
try:
- util.remove(self.path + b'XXX', True)
+ util.remove(self.path + b"XXX", True)
except OSError:
- self.fail('OSError when removing path')
+ self.fail("OSError when removing path")
class SafeMoveCopyTest(_common.TestCase):
def setUp(self):
super().setUp()
- self.path = os.path.join(self.temp_dir, b'testfile')
+ self.path = os.path.join(self.temp_dir, b"testfile")
touch(self.path)
- self.otherpath = os.path.join(self.temp_dir, b'testfile2')
+ self.otherpath = os.path.join(self.temp_dir, b"testfile2")
touch(self.otherpath)
- self.dest = self.path + b'.dest'
+ self.dest = self.path + b".dest"
def test_successful_move(self):
util.move(self.path, self.dest)
@@ -614,9 +614,9 @@ class PruneTest(_common.TestCase):
def setUp(self):
super().setUp()
- self.base = os.path.join(self.temp_dir, b'testdir')
+ self.base = os.path.join(self.temp_dir, b"testdir")
os.mkdir(syspath(self.base))
- self.sub = os.path.join(self.base, b'subdir')
+ self.sub = os.path.join(self.base, b"subdir")
os.mkdir(syspath(self.sub))
def test_prune_existent_directory(self):
@@ -625,7 +625,7 @@ def test_prune_existent_directory(self):
self.assertNotExists(self.sub)
def test_prune_nonexistent_directory(self):
- util.prune_dirs(os.path.join(self.sub, b'another'), self.base)
+ util.prune_dirs(os.path.join(self.sub, b"another"), self.base)
self.assertExists(self.base)
self.assertNotExists(self.sub)
@@ -634,78 +634,72 @@ class WalkTest(_common.TestCase):
def setUp(self):
super().setUp()
- self.base = os.path.join(self.temp_dir, b'testdir')
+ self.base = os.path.join(self.temp_dir, b"testdir")
os.mkdir(syspath(self.base))
- touch(os.path.join(self.base, b'y'))
- touch(os.path.join(self.base, b'x'))
- os.mkdir(syspath(os.path.join(self.base, b'd')))
- touch(os.path.join(self.base, b'd', b'z'))
+ touch(os.path.join(self.base, b"y"))
+ touch(os.path.join(self.base, b"x"))
+ os.mkdir(syspath(os.path.join(self.base, b"d")))
+ touch(os.path.join(self.base, b"d", b"z"))
def test_sorted_files(self):
res = list(util.sorted_walk(self.base))
self.assertEqual(len(res), 2)
- self.assertEqual(res[0],
- (self.base, [b'd'], [b'x', b'y']))
- self.assertEqual(res[1],
- (os.path.join(self.base, b'd'), [], [b'z']))
+ self.assertEqual(res[0], (self.base, [b"d"], [b"x", b"y"]))
+ self.assertEqual(res[1], (os.path.join(self.base, b"d"), [], [b"z"]))
def test_ignore_file(self):
- res = list(util.sorted_walk(self.base, (b'x',)))
+ res = list(util.sorted_walk(self.base, (b"x",)))
self.assertEqual(len(res), 2)
- self.assertEqual(res[0],
- (self.base, [b'd'], [b'y']))
- self.assertEqual(res[1],
- (os.path.join(self.base, b'd'), [], [b'z']))
+ self.assertEqual(res[0], (self.base, [b"d"], [b"y"]))
+ self.assertEqual(res[1], (os.path.join(self.base, b"d"), [], [b"z"]))
def test_ignore_directory(self):
- res = list(util.sorted_walk(self.base, (b'd',)))
+ res = list(util.sorted_walk(self.base, (b"d",)))
self.assertEqual(len(res), 1)
- self.assertEqual(res[0],
- (self.base, [], [b'x', b'y']))
+ self.assertEqual(res[0], (self.base, [], [b"x", b"y"]))
def test_ignore_everything(self):
- res = list(util.sorted_walk(self.base, (b'*',)))
+ res = list(util.sorted_walk(self.base, (b"*",)))
self.assertEqual(len(res), 1)
- self.assertEqual(res[0],
- (self.base, [], []))
+ self.assertEqual(res[0], (self.base, [], []))
class UniquePathTest(_common.TestCase):
def setUp(self):
super().setUp()
- self.base = os.path.join(self.temp_dir, b'testdir')
+ self.base = os.path.join(self.temp_dir, b"testdir")
os.mkdir(syspath(self.base))
- touch(os.path.join(self.base, b'x.mp3'))
- touch(os.path.join(self.base, b'x.1.mp3'))
- touch(os.path.join(self.base, b'x.2.mp3'))
- touch(os.path.join(self.base, b'y.mp3'))
+ touch(os.path.join(self.base, b"x.mp3"))
+ touch(os.path.join(self.base, b"x.1.mp3"))
+ touch(os.path.join(self.base, b"x.2.mp3"))
+ touch(os.path.join(self.base, b"y.mp3"))
def test_new_file_unchanged(self):
- path = util.unique_path(os.path.join(self.base, b'z.mp3'))
- self.assertEqual(path, os.path.join(self.base, b'z.mp3'))
+ path = util.unique_path(os.path.join(self.base, b"z.mp3"))
+ self.assertEqual(path, os.path.join(self.base, b"z.mp3"))
def test_conflicting_file_appends_1(self):
- path = util.unique_path(os.path.join(self.base, b'y.mp3'))
- self.assertEqual(path, os.path.join(self.base, b'y.1.mp3'))
+ path = util.unique_path(os.path.join(self.base, b"y.mp3"))
+ self.assertEqual(path, os.path.join(self.base, b"y.1.mp3"))
def test_conflicting_file_appends_higher_number(self):
- path = util.unique_path(os.path.join(self.base, b'x.mp3'))
- self.assertEqual(path, os.path.join(self.base, b'x.3.mp3'))
+ path = util.unique_path(os.path.join(self.base, b"x.mp3"))
+ self.assertEqual(path, os.path.join(self.base, b"x.3.mp3"))
def test_conflicting_file_with_number_increases_number(self):
- path = util.unique_path(os.path.join(self.base, b'x.1.mp3'))
- self.assertEqual(path, os.path.join(self.base, b'x.3.mp3'))
+ path = util.unique_path(os.path.join(self.base, b"x.1.mp3"))
+ self.assertEqual(path, os.path.join(self.base, b"x.3.mp3"))
class MkDirAllTest(_common.TestCase):
def test_parent_exists(self):
- path = os.path.join(self.temp_dir, b'foo', b'bar', b'baz', b'qux.mp3')
+ path = os.path.join(self.temp_dir, b"foo", b"bar", b"baz", b"qux.mp3")
util.mkdirall(path)
- self.assertIsDir(os.path.join(self.temp_dir, b'foo', b'bar', b'baz'))
+ self.assertIsDir(os.path.join(self.temp_dir, b"foo", b"bar", b"baz"))
def test_child_does_not_exist(self):
- path = os.path.join(self.temp_dir, b'foo', b'bar', b'baz', b'qux.mp3')
+ path = os.path.join(self.temp_dir, b"foo", b"bar", b"baz", b"qux.mp3")
util.mkdirall(path)
self.assertNotExists(path)
@@ -713,5 +707,6 @@ def test_child_does_not_exist(self):
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/test_hidden.py b/test/test_hidden.py
index 9bce048c12..f60f1f6e94 100644
--- a/test/test_hidden.py
+++ b/test/test_hidden.py
@@ -15,14 +15,15 @@
"""Tests for the 'hidden' utility."""
-import unittest
+import ctypes
+import errno
+import subprocess
import sys
import tempfile
-from beets.util import hidden
+import unittest
+
from beets import util
-import subprocess
-import errno
-import ctypes
+from beets.util import hidden
class HiddenFileTest(unittest.TestCase):
@@ -30,8 +31,8 @@ def setUp(self):
pass
def test_osx_hidden(self):
- if not sys.platform == 'darwin':
- self.skipTest('sys.platform is not darwin')
+ if not sys.platform == "darwin":
+ self.skipTest("sys.platform is not darwin")
return
with tempfile.NamedTemporaryFile(delete=False) as f:
@@ -47,8 +48,8 @@ def test_osx_hidden(self):
self.assertTrue(hidden.is_hidden(f.name))
def test_windows_hidden(self):
- if not sys.platform == 'win32':
- self.skipTest('sys.platform is not windows')
+ if not sys.platform == "win32":
+ self.skipTest("sys.platform is not windows")
return
# FILE_ATTRIBUTE_HIDDEN = 2 (0x2) from GetFileAttributes documentation.
@@ -56,8 +57,9 @@ def test_windows_hidden(self):
with tempfile.NamedTemporaryFile() as f:
# Hide the file using
- success = ctypes.windll.kernel32.SetFileAttributesW(f.name,
- hidden_mask)
+ success = ctypes.windll.kernel32.SetFileAttributesW(
+ f.name, hidden_mask
+ )
if not success:
self.skipTest("unable to set file attributes")
@@ -65,11 +67,11 @@ def test_windows_hidden(self):
self.assertTrue(hidden.is_hidden(f.name))
def test_other_hidden(self):
- if sys.platform == 'darwin' or sys.platform == 'win32':
- self.skipTest('sys.platform is known')
+ if sys.platform == "darwin" or sys.platform == "win32":
+ self.skipTest("sys.platform is known")
return
- with tempfile.NamedTemporaryFile(prefix='.tmp') as f:
+ with tempfile.NamedTemporaryFile(prefix=".tmp") as f:
fn = util.bytestring_path(f.name)
self.assertTrue(hidden.is_hidden(fn))
@@ -77,5 +79,6 @@ def test_other_hidden(self):
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/test_importer.py b/test/test_importer.py
index 79c51b1904..9b76d5038a 100644
--- a/test/test_importer.py
+++ b/test/test_importer.py
@@ -18,28 +18,29 @@
import os
import re
import shutil
-import unicodedata
-import sys
import stat
+import sys
+import unicodedata
+import unittest
from io import StringIO
+from tarfile import TarFile
from tempfile import mkstemp
+from test import _common
+from test.helper import (
+ ImportSessionFixture,
+ TestHelper,
+ capture_log,
+ has_program,
+)
+from unittest.mock import Mock, patch
from zipfile import ZipFile
-from tarfile import TarFile
-from unittest.mock import patch, Mock
-import unittest
-from test import _common
-from test.helper import TestHelper, has_program, capture_log
-from test.helper import ImportSessionFixture
-from beets import importer
-from beets.importer import albums_in_dir
from mediafile import MediaFile
-from beets import autotag
-from beets.autotag import AlbumInfo, TrackInfo, AlbumMatch
-from beets import config
-from beets import logging
-from beets import util
-from beets.util import displayable_path, bytestring_path, py3_path, syspath
+
+from beets import autotag, config, importer, logging, util
+from beets.autotag import AlbumInfo, AlbumMatch, TrackInfo
+from beets.importer import albums_in_dir
+from beets.util import bytestring_path, displayable_path, py3_path, syspath
class AutotagStub:
@@ -47,11 +48,11 @@ class AutotagStub:
autotagger returns.
"""
- NONE = 'NONE'
- IDENT = 'IDENT'
- GOOD = 'GOOD'
- BAD = 'BAD'
- MISSING = 'MISSING'
+ NONE = "NONE"
+ IDENT = "IDENT"
+ GOOD = "GOOD"
+ BAD = "BAD"
+ MISSING = "MISSING"
"""Generate an album match for all but one track
"""
@@ -94,10 +95,10 @@ def match_album(self, albumartist, album, tracks, extra_tags):
def match_track(self, artist, title):
yield TrackInfo(
- title=title.replace('Tag', 'Applied'),
- track_id='trackid',
- artist=artist.replace('Tag', 'Applied'),
- artist_id='artistid',
+ title=title.replace("Tag", "Applied"),
+ track_id="trackid",
+ artist=artist.replace("Tag", "Applied"),
+ artist_id="artistid",
length=1,
index=0,
)
@@ -110,8 +111,8 @@ def track_for_id(self, mbid):
def _make_track_match(self, artist, album, number):
return TrackInfo(
- title='Applied Title %d' % number,
- track_id='match %d' % number,
+ title="Applied Title %d" % number,
+ track_id="match %d" % number,
artist=artist,
length=1,
index=0,
@@ -119,14 +120,14 @@ def _make_track_match(self, artist, album, number):
def _make_album_match(self, artist, album, tracks, distance=0, missing=0):
if distance:
- id = ' ' + 'M' * distance
+ id = " " + "M" * distance
else:
- id = ''
+ id = ""
if artist is None:
artist = "Various Artists"
else:
- artist = artist.replace('Tag', 'Applied') + id
- album = album.replace('Tag', 'Applied') + id
+ artist = artist.replace("Tag", "Applied") + id
+ album = album.replace("Tag", "Applied") + id
track_infos = []
for i in range(tracks - missing):
@@ -137,10 +138,10 @@ def _make_album_match(self, artist, album, tracks, distance=0, missing=0):
album=album,
tracks=track_infos,
va=False,
- album_id='albumid' + id,
- artist_id='artistid' + id,
- albumtype='soundtrack',
- data_source='match_source'
+ album_id="albumid" + id,
+ artist_id="artistid" + id,
+ albumtype="soundtrack",
+ data_source="match_source",
)
@@ -153,9 +154,9 @@ class ImportHelper(TestHelper):
def setup_beets(self, disk=False):
super().setup_beets(disk)
self.lib.path_formats = [
- ('default', os.path.join('$artist', '$album', '$title')),
- ('singleton:true', os.path.join('singletons', '$title')),
- ('comp:true', os.path.join('compilations', '$album', '$title')),
+ ("default", os.path.join("$artist", "$album", "$title")),
+ ("singleton:true", os.path.join("singletons", "$title")),
+ ("comp:true", os.path.join("compilations", "$album", "$title")),
]
def _create_import_dir(self, count=3):
@@ -172,60 +173,69 @@ def _create_import_dir(self, count=3):
:param count: Number of files to create
"""
- self.import_dir = os.path.join(self.temp_dir, b'testsrcdir')
+ self.import_dir = os.path.join(self.temp_dir, b"testsrcdir")
if os.path.isdir(syspath(self.import_dir)):
shutil.rmtree(syspath(self.import_dir))
- album_path = os.path.join(self.import_dir, b'the_album')
+ album_path = os.path.join(self.import_dir, b"the_album")
os.makedirs(syspath(album_path))
- resource_path = os.path.join(_common.RSRC, b'full.mp3')
+ resource_path = os.path.join(_common.RSRC, b"full.mp3")
metadata = {
- 'artist': 'Tag Artist',
- 'album': 'Tag Album',
- 'albumartist': None,
- 'mb_trackid': None,
- 'mb_albumid': None,
- 'comp': None
+ "artist": "Tag Artist",
+ "album": "Tag Album",
+ "albumartist": None,
+ "mb_trackid": None,
+ "mb_albumid": None,
+ "comp": None,
}
self.media_files = []
for i in range(count):
# Copy files
medium_path = os.path.join(
- album_path,
- bytestring_path('track_%d.mp3' % (i + 1))
+ album_path, bytestring_path("track_%d.mp3" % (i + 1))
)
shutil.copy(syspath(resource_path), syspath(medium_path))
medium = MediaFile(medium_path)
# Set metadata
- metadata['track'] = i + 1
- metadata['title'] = 'Tag Title %d' % (i + 1)
+ metadata["track"] = i + 1
+ metadata["title"] = "Tag Title %d" % (i + 1)
for attr in metadata:
setattr(medium, attr, metadata[attr])
medium.save()
self.media_files.append(medium)
self.import_media = self.media_files
- def _setup_import_session(self, import_dir=None, delete=False,
- threaded=False, copy=True, singletons=False,
- move=False, autotag=True, link=False,
- hardlink=False):
- config['import']['copy'] = copy
- config['import']['delete'] = delete
- config['import']['timid'] = True
- config['threaded'] = False
- config['import']['singletons'] = singletons
- config['import']['move'] = move
- config['import']['autotag'] = autotag
- config['import']['resume'] = False
- config['import']['link'] = link
- config['import']['hardlink'] = hardlink
+ def _setup_import_session(
+ self,
+ import_dir=None,
+ delete=False,
+ threaded=False,
+ copy=True,
+ singletons=False,
+ move=False,
+ autotag=True,
+ link=False,
+ hardlink=False,
+ ):
+ config["import"]["copy"] = copy
+ config["import"]["delete"] = delete
+ config["import"]["timid"] = True
+ config["threaded"] = False
+ config["import"]["singletons"] = singletons
+ config["import"]["move"] = move
+ config["import"]["autotag"] = autotag
+ config["import"]["resume"] = False
+ config["import"]["link"] = link
+ config["import"]["hardlink"] = hardlink
self.importer = ImportSessionFixture(
- self.lib, loghandler=None, query=None,
- paths=[import_dir or self.import_dir]
+ self.lib,
+ loghandler=None,
+ query=None,
+ paths=[import_dir or self.import_dir],
)
def assert_file_in_lib(self, *segments):
@@ -247,7 +257,7 @@ def assert_lib_dir_empty(self):
class ScrubbedImportTest(_common.TestCase, ImportHelper):
def setUp(self):
self.setup_beets(disk=True)
- self.load_plugins('scrub')
+ self.load_plugins("scrub")
self._create_import_dir(2)
self._setup_import_session(autotag=False)
@@ -256,40 +266,40 @@ def tearDown(self):
self.teardown_beets()
def test_tags_not_scrubbed(self):
- config['plugins'] = ['scrub']
- config['scrub']['auto'] = False
- config['import']['write'] = True
+ config["plugins"] = ["scrub"]
+ config["scrub"]["auto"] = False
+ config["import"]["write"] = True
for mediafile in self.import_media:
- self.assertEqual(mediafile.artist, 'Tag Artist')
- self.assertEqual(mediafile.album, 'Tag Album')
+ self.assertEqual(mediafile.artist, "Tag Artist")
+ self.assertEqual(mediafile.album, "Tag Album")
self.importer.run()
for item in self.lib.items():
imported_file = os.path.join(item.path)
imported_file = MediaFile(imported_file)
- self.assertEqual(imported_file.artist, 'Tag Artist')
- self.assertEqual(imported_file.album, 'Tag Album')
+ self.assertEqual(imported_file.artist, "Tag Artist")
+ self.assertEqual(imported_file.album, "Tag Album")
def test_tags_restored(self):
- config['plugins'] = ['scrub']
- config['scrub']['auto'] = True
- config['import']['write'] = True
+ config["plugins"] = ["scrub"]
+ config["scrub"]["auto"] = True
+ config["import"]["write"] = True
for mediafile in self.import_media:
- self.assertEqual(mediafile.artist, 'Tag Artist')
- self.assertEqual(mediafile.album, 'Tag Album')
+ self.assertEqual(mediafile.artist, "Tag Artist")
+ self.assertEqual(mediafile.album, "Tag Album")
self.importer.run()
for item in self.lib.items():
imported_file = os.path.join(item.path)
imported_file = MediaFile(imported_file)
- self.assertEqual(imported_file.artist, 'Tag Artist')
- self.assertEqual(imported_file.album, 'Tag Album')
+ self.assertEqual(imported_file.artist, "Tag Artist")
+ self.assertEqual(imported_file.album, "Tag Album")
def test_tags_not_restored(self):
- config['plugins'] = ['scrub']
- config['scrub']['auto'] = True
- config['import']['write'] = False
+ config["plugins"] = ["scrub"]
+ config["scrub"]["auto"] = True
+ config["import"]["write"] = False
for mediafile in self.import_media:
- self.assertEqual(mediafile.artist, 'Tag Artist')
- self.assertEqual(mediafile.album, 'Tag Album')
+ self.assertEqual(mediafile.artist, "Tag Artist")
+ self.assertEqual(mediafile.album, "Tag Album")
self.importer.run()
for item in self.lib.items():
imported_file = os.path.join(item.path)
@@ -312,26 +322,30 @@ def test_album_created_with_track_artist(self):
self.importer.run()
albums = self.lib.albums()
self.assertEqual(len(albums), 1)
- self.assertEqual(albums[0].albumartist, 'Tag Artist')
+ self.assertEqual(albums[0].albumartist, "Tag Artist")
def test_import_copy_arrives(self):
self.importer.run()
for mediafile in self.import_media:
self.assert_file_in_lib(
- b'Tag Artist', b'Tag Album',
- util.bytestring_path(f'{mediafile.title}.mp3'))
+ b"Tag Artist",
+ b"Tag Album",
+ util.bytestring_path(f"{mediafile.title}.mp3"),
+ )
def test_threaded_import_copy_arrives(self):
- config['threaded'] = True
+ config["threaded"] = True
self.importer.run()
for mediafile in self.import_media:
self.assert_file_in_lib(
- b'Tag Artist', b'Tag Album',
- util.bytestring_path(f'{mediafile.title}.mp3'))
+ b"Tag Artist",
+ b"Tag Album",
+ util.bytestring_path(f"{mediafile.title}.mp3"),
+ )
def test_import_with_move_deletes_import_files(self):
- config['import']['move'] = True
+ config["import"]["move"] = True
for mediafile in self.import_media:
self.assertExists(mediafile.path)
@@ -340,91 +354,95 @@ def test_import_with_move_deletes_import_files(self):
self.assertNotExists(mediafile.path)
def test_import_with_move_prunes_directory_empty(self):
- config['import']['move'] = True
+ config["import"]["move"] = True
- self.assertExists(os.path.join(self.import_dir, b'the_album'))
+ self.assertExists(os.path.join(self.import_dir, b"the_album"))
self.importer.run()
- self.assertNotExists(os.path.join(self.import_dir, b'the_album'))
+ self.assertNotExists(os.path.join(self.import_dir, b"the_album"))
def test_import_with_move_prunes_with_extra_clutter(self):
- self.touch(os.path.join(self.import_dir, b'the_album', b'alog.log'))
- config['clutter'] = ['*.log']
- config['import']['move'] = True
+ self.touch(os.path.join(self.import_dir, b"the_album", b"alog.log"))
+ config["clutter"] = ["*.log"]
+ config["import"]["move"] = True
- self.assertExists(os.path.join(self.import_dir, b'the_album'))
+ self.assertExists(os.path.join(self.import_dir, b"the_album"))
self.importer.run()
- self.assertNotExists(os.path.join(self.import_dir, b'the_album'))
+ self.assertNotExists(os.path.join(self.import_dir, b"the_album"))
def test_threaded_import_move_arrives(self):
- config['import']['move'] = True
- config['import']['threaded'] = True
+ config["import"]["move"] = True
+ config["import"]["threaded"] = True
self.importer.run()
for mediafile in self.import_media:
self.assert_file_in_lib(
- b'Tag Artist', b'Tag Album',
- util.bytestring_path(f'{mediafile.title}.mp3'))
+ b"Tag Artist",
+ b"Tag Album",
+ util.bytestring_path(f"{mediafile.title}.mp3"),
+ )
def test_threaded_import_move_deletes_import(self):
- config['import']['move'] = True
- config['threaded'] = True
+ config["import"]["move"] = True
+ config["threaded"] = True
self.importer.run()
for mediafile in self.import_media:
self.assertNotExists(mediafile.path)
def test_import_without_delete_retains_files(self):
- config['import']['delete'] = False
+ config["import"]["delete"] = False
self.importer.run()
for mediafile in self.import_media:
self.assertExists(mediafile.path)
def test_import_with_delete_removes_files(self):
- config['import']['delete'] = True
+ config["import"]["delete"] = True
self.importer.run()
for mediafile in self.import_media:
self.assertNotExists(mediafile.path)
def test_import_with_delete_prunes_directory_empty(self):
- config['import']['delete'] = True
- self.assertExists(os.path.join(self.import_dir, b'the_album'))
+ config["import"]["delete"] = True
+ self.assertExists(os.path.join(self.import_dir, b"the_album"))
self.importer.run()
- self.assertNotExists(os.path.join(self.import_dir, b'the_album'))
+ self.assertNotExists(os.path.join(self.import_dir, b"the_album"))
@unittest.skipUnless(_common.HAVE_SYMLINK, "need symlinks")
def test_import_link_arrives(self):
- config['import']['link'] = True
+ config["import"]["link"] = True
self.importer.run()
for mediafile in self.import_media:
filename = os.path.join(
self.libdir,
- b'Tag Artist', b'Tag Album',
- util.bytestring_path(f'{mediafile.title}.mp3')
+ b"Tag Artist",
+ b"Tag Album",
+ util.bytestring_path(f"{mediafile.title}.mp3"),
)
self.assertExists(filename)
self.assertTrue(os.path.islink(syspath(filename)))
self.assert_equal_path(
util.bytestring_path(os.readlink(syspath(filename))),
- mediafile.path
+ mediafile.path,
)
@unittest.skipUnless(_common.HAVE_HARDLINK, "need hardlinks")
def test_import_hardlink_arrives(self):
- config['import']['hardlink'] = True
+ config["import"]["hardlink"] = True
self.importer.run()
for mediafile in self.import_media:
filename = os.path.join(
self.libdir,
- b'Tag Artist', b'Tag Album',
- util.bytestring_path(f'{mediafile.title}.mp3')
+ b"Tag Artist",
+ b"Tag Album",
+ util.bytestring_path(f"{mediafile.title}.mp3"),
)
self.assertExists(filename)
s1 = os.stat(syspath(mediafile.path))
s2 = os.stat(syspath(filename))
self.assertTrue(
- (s1[stat.ST_INO], s1[stat.ST_DEV]) ==
- (s2[stat.ST_INO], s2[stat.ST_DEV])
+ (s1[stat.ST_INO], s1[stat.ST_DEV])
+ == (s2[stat.ST_INO], s2[stat.ST_DEV])
)
@@ -432,9 +450,8 @@ def create_archive(session):
(handle, path) = mkstemp(dir=py3_path(session.temp_dir))
path = bytestring_path(path)
os.close(handle)
- archive = ZipFile(py3_path(path), mode='w')
- archive.write(syspath(os.path.join(_common.RSRC, b'full.mp3')),
- 'full.mp3')
+ archive = ZipFile(py3_path(path), mode="w")
+ archive.write(syspath(os.path.join(_common.RSRC, b"full.mp3")), "full.mp3")
archive.close()
path = bytestring_path(path)
return path
@@ -448,7 +465,7 @@ class RmTempTest(unittest.TestCase, ImportHelper, _common.Assertions):
def setUp(self):
self.setup_beets()
self.want_resume = False
- self.config['incremental'] = False
+ self.config["incremental"] = False
self._old_home = None
def tearDown(self):
@@ -466,7 +483,6 @@ def test_rm(self):
class ImportZipTest(unittest.TestCase, ImportHelper):
-
def setUp(self):
self.setup_beets()
@@ -485,36 +501,33 @@ def test_import_zip(self):
class ImportTarTest(ImportZipTest):
-
def create_archive(self):
(handle, path) = mkstemp(dir=syspath(self.temp_dir))
path = bytestring_path(path)
os.close(handle)
- archive = TarFile(py3_path(path), mode='w')
- archive.add(syspath(os.path.join(_common.RSRC, b'full.mp3')),
- 'full.mp3')
+ archive = TarFile(py3_path(path), mode="w")
+ archive.add(
+ syspath(os.path.join(_common.RSRC, b"full.mp3")), "full.mp3"
+ )
archive.close()
return path
-@unittest.skipIf(not has_program('unrar'), 'unrar program not found')
+@unittest.skipIf(not has_program("unrar"), "unrar program not found")
class ImportRarTest(ImportZipTest):
-
def create_archive(self):
- return os.path.join(_common.RSRC, b'archive.rar')
+ return os.path.join(_common.RSRC, b"archive.rar")
class Import7zTest(ImportZipTest):
-
def create_archive(self):
- return os.path.join(_common.RSRC, b'archive.7z')
+ return os.path.join(_common.RSRC, b"archive.7z")
-@unittest.skip('Implement me!')
+@unittest.skip("Implement me!")
class ImportPasswordRarTest(ImportZipTest):
-
def create_archive(self):
- return os.path.join(_common.RSRC, b'password.rar')
+ return os.path.join(_common.RSRC, b"password.rar")
class ImportSingletonTest(_common.TestCase, ImportHelper):
@@ -526,7 +539,7 @@ def setUp(self):
self.setup_beets()
self._create_import_dir(1)
self._setup_import_session()
- config['import']['singletons'] = True
+ config["import"]["singletons"] = True
self.matcher = AutotagStub().install()
def tearDown(self):
@@ -538,7 +551,7 @@ def test_apply_asis_adds_track(self):
self.importer.add_choice(importer.action.ASIS)
self.importer.run()
- self.assertEqual(self.lib.items().get().title, 'Tag Title 1')
+ self.assertEqual(self.lib.items().get().title, "Tag Title 1")
def test_apply_asis_does_not_add_album(self):
self.assertEqual(self.lib.albums().get(), None)
@@ -552,14 +565,14 @@ def test_apply_asis_adds_singleton_path(self):
self.importer.add_choice(importer.action.ASIS)
self.importer.run()
- self.assert_file_in_lib(b'singletons', b'Tag Title 1.mp3')
+ self.assert_file_in_lib(b"singletons", b"Tag Title 1.mp3")
def test_apply_candidate_adds_track(self):
self.assertEqual(self.lib.items().get(), None)
self.importer.add_choice(importer.action.APPLY)
self.importer.run()
- self.assertEqual(self.lib.items().get().title, 'Applied Title 1')
+ self.assertEqual(self.lib.items().get().title, "Applied Title 1")
def test_apply_candidate_does_not_add_album(self):
self.importer.add_choice(importer.action.APPLY)
@@ -571,7 +584,7 @@ def test_apply_candidate_adds_singleton_path(self):
self.importer.add_choice(importer.action.APPLY)
self.importer.run()
- self.assert_file_in_lib(b'singletons', b'Applied Title 1.mp3')
+ self.assert_file_in_lib(b"singletons", b"Applied Title 1.mp3")
def test_skip_does_not_add_first_track(self):
self.importer.add_choice(importer.action.SKIP)
@@ -586,13 +599,13 @@ def test_skip_adds_other_tracks(self):
self.assertEqual(len(self.lib.items()), 1)
def test_import_single_files(self):
- resource_path = os.path.join(_common.RSRC, b'empty.mp3')
- single_path = os.path.join(self.import_dir, b'track_2.mp3')
+ resource_path = os.path.join(_common.RSRC, b"empty.mp3")
+ single_path = os.path.join(self.import_dir, b"track_2.mp3")
util.copy(resource_path, single_path)
import_files = [
- os.path.join(self.import_dir, b'the_album'),
- single_path
+ os.path.join(self.import_dir, b"the_album"),
+ single_path,
]
self._setup_import_session(singletons=False)
self.importer.paths = import_files
@@ -608,10 +621,10 @@ def test_set_fields(self):
genre = "\U0001F3B7 Jazz"
collection = "To Listen"
- config['import']['set_fields'] = {
- 'collection': collection,
- 'genre': genre,
- 'title': "$title - formatted",
+ config["import"]["set_fields"] = {
+ "collection": collection,
+ "genre": genre,
+ "title": "$title - formatted",
}
# As-is item import.
@@ -641,8 +654,8 @@ def test_set_fields(self):
class ImportTest(_common.TestCase, ImportHelper):
- """Test APPLY, ASIS and SKIP choices.
- """
+ """Test APPLY, ASIS and SKIP choices."""
+
def setUp(self):
self.setup_beets()
self._create_import_dir(1)
@@ -659,35 +672,34 @@ def test_apply_asis_adds_album(self):
self.importer.add_choice(importer.action.ASIS)
self.importer.run()
- self.assertEqual(self.lib.albums().get().album, 'Tag Album')
+ self.assertEqual(self.lib.albums().get().album, "Tag Album")
def test_apply_asis_adds_tracks(self):
self.assertEqual(self.lib.items().get(), None)
self.importer.add_choice(importer.action.ASIS)
self.importer.run()
- self.assertEqual(self.lib.items().get().title, 'Tag Title 1')
+ self.assertEqual(self.lib.items().get().title, "Tag Title 1")
def test_apply_asis_adds_album_path(self):
self.assert_lib_dir_empty()
self.importer.add_choice(importer.action.ASIS)
self.importer.run()
- self.assert_file_in_lib(
- b'Tag Artist', b'Tag Album', b'Tag Title 1.mp3')
+ self.assert_file_in_lib(b"Tag Artist", b"Tag Album", b"Tag Title 1.mp3")
def test_apply_candidate_adds_album(self):
self.assertEqual(self.lib.albums().get(), None)
self.importer.add_choice(importer.action.APPLY)
self.importer.run()
- self.assertEqual(self.lib.albums().get().album, 'Applied Album')
+ self.assertEqual(self.lib.albums().get().album, "Applied Album")
def test_apply_candidate_adds_tracks(self):
self.assertEqual(self.lib.items().get(), None)
self.importer.add_choice(importer.action.APPLY)
self.importer.run()
- self.assertEqual(self.lib.items().get().title, 'Applied Title 1')
+ self.assertEqual(self.lib.items().get().title, "Applied Title 1")
def test_apply_candidate_adds_album_path(self):
self.assert_lib_dir_empty()
@@ -695,28 +707,29 @@ def test_apply_candidate_adds_album_path(self):
self.importer.add_choice(importer.action.APPLY)
self.importer.run()
self.assert_file_in_lib(
- b'Applied Artist', b'Applied Album', b'Applied Title 1.mp3')
+ b"Applied Artist", b"Applied Album", b"Applied Title 1.mp3"
+ )
def test_apply_from_scratch_removes_other_metadata(self):
- config['import']['from_scratch'] = True
+ config["import"]["from_scratch"] = True
for mediafile in self.import_media:
- mediafile.genre = 'Tag Genre'
+ mediafile.genre = "Tag Genre"
mediafile.save()
self.importer.add_choice(importer.action.APPLY)
self.importer.run()
- self.assertEqual(self.lib.items().get().genre, '')
+ self.assertEqual(self.lib.items().get().genre, "")
def test_apply_from_scratch_keeps_format(self):
- config['import']['from_scratch'] = True
+ config["import"]["from_scratch"] = True
self.importer.add_choice(importer.action.APPLY)
self.importer.run()
- self.assertEqual(self.lib.items().get().format, 'MP3')
+ self.assertEqual(self.lib.items().get().format, "MP3")
def test_apply_from_scratch_keeps_bitrate(self):
- config['import']['from_scratch'] = True
+ config["import"]["from_scratch"] = True
bitrate = 80000
self.importer.add_choice(importer.action.APPLY)
@@ -724,10 +737,11 @@ def test_apply_from_scratch_keeps_bitrate(self):
self.assertEqual(self.lib.items().get().bitrate, bitrate)
def test_apply_with_move_deletes_import(self):
- config['import']['move'] = True
+ config["import"]["move"] = True
import_file = os.path.join(
- self.import_dir, b'the_album', b'track_1.mp3')
+ self.import_dir, b"the_album", b"track_1.mp3"
+ )
self.assertExists(import_file)
self.importer.add_choice(importer.action.APPLY)
@@ -735,10 +749,11 @@ def test_apply_with_move_deletes_import(self):
self.assertNotExists(import_file)
def test_apply_with_delete_deletes_import(self):
- config['import']['delete'] = True
+ config["import"]["delete"] = True
- import_file = os.path.join(self.import_dir,
- b'the_album', b'track_1.mp3')
+ import_file = os.path.join(
+ self.import_dir, b"the_album", b"track_1.mp3"
+ )
self.assertExists(import_file)
self.importer.add_choice(importer.action.APPLY)
@@ -751,8 +766,8 @@ def test_skip_does_not_add_track(self):
self.assertEqual(self.lib.items().get(), None)
def test_skip_non_album_dirs(self):
- self.assertIsDir(os.path.join(self.import_dir, b'the_album'))
- self.touch(b'cruft', dir=self.import_dir)
+ self.assertIsDir(os.path.join(self.import_dir, b"the_album"))
+ self.touch(b"cruft", dir=self.import_dir)
self.importer.add_choice(importer.action.APPLY)
self.importer.run()
self.assertEqual(len(self.lib.albums()), 1)
@@ -765,24 +780,24 @@ def test_unmatched_tracks_not_added(self):
self.assertEqual(len(self.lib.items()), 1)
def test_empty_directory_warning(self):
- import_dir = os.path.join(self.temp_dir, b'empty')
- self.touch(b'non-audio', dir=import_dir)
+ import_dir = os.path.join(self.temp_dir, b"empty")
+ self.touch(b"non-audio", dir=import_dir)
self._setup_import_session(import_dir=import_dir)
with capture_log() as logs:
self.importer.run()
import_dir = displayable_path(import_dir)
- self.assertIn(f'No files imported from {import_dir}', logs)
+ self.assertIn(f"No files imported from {import_dir}", logs)
def test_empty_directory_singleton_warning(self):
- import_dir = os.path.join(self.temp_dir, b'empty')
- self.touch(b'non-audio', dir=import_dir)
+ import_dir = os.path.join(self.temp_dir, b"empty")
+ self.touch(b"non-audio", dir=import_dir)
self._setup_import_session(import_dir=import_dir, singletons=True)
with capture_log() as logs:
self.importer.run()
import_dir = displayable_path(import_dir)
- self.assertIn(f'No files imported from {import_dir}', logs)
+ self.assertIn(f"No files imported from {import_dir}", logs)
def test_asis_no_data_source(self):
self.assertEqual(self.lib.items().get(), None)
@@ -798,11 +813,11 @@ def test_set_fields(self):
collection = "To Listen"
comments = "managed by beets"
- config['import']['set_fields'] = {
- 'genre': genre,
- 'collection': collection,
- 'comments': comments,
- 'album': "$album - formatted",
+ config["import"]["set_fields"] = {
+ "genre": genre,
+ "collection": collection,
+ "comments": comments,
+ "album": "$album - formatted",
}
# As-is album import.
@@ -815,18 +830,16 @@ def test_set_fields(self):
self.assertEqual(album.genre, genre)
self.assertEqual(album.comments, comments)
for item in album.items():
+ self.assertEqual(item.get("genre", with_album=False), genre)
self.assertEqual(
- item.get("genre", with_album=False),
- genre)
- self.assertEqual(
- item.get("collection", with_album=False),
- collection)
+ item.get("collection", with_album=False), collection
+ )
self.assertEqual(
- item.get("comments", with_album=False),
- comments)
+ item.get("comments", with_album=False), comments
+ )
self.assertEqual(
- item.get("album", with_album=False),
- "Tag Album - formatted")
+ item.get("album", with_album=False), "Tag Album - formatted"
+ )
# Remove album from library to test again with APPLY choice.
album.remove()
@@ -841,23 +854,22 @@ def test_set_fields(self):
self.assertEqual(album.genre, genre)
self.assertEqual(album.comments, comments)
for item in album.items():
+ self.assertEqual(item.get("genre", with_album=False), genre)
self.assertEqual(
- item.get("genre", with_album=False),
- genre)
- self.assertEqual(
- item.get("collection", with_album=False),
- collection)
+ item.get("collection", with_album=False), collection
+ )
self.assertEqual(
- item.get("comments", with_album=False),
- comments)
+ item.get("comments", with_album=False), comments
+ )
self.assertEqual(
- item.get("album", with_album=False),
- "Applied Album - formatted")
+ item.get("album", with_album=False),
+ "Applied Album - formatted",
+ )
class ImportTracksTest(_common.TestCase, ImportHelper):
- """Test TRACKS and APPLY choice.
- """
+ """Test TRACKS and APPLY choice."""
+
def setUp(self):
self.setup_beets()
self._create_import_dir(1)
@@ -876,7 +888,7 @@ def test_apply_tracks_adds_singleton_track(self):
self.importer.add_choice(importer.action.APPLY)
self.importer.add_choice(importer.action.APPLY)
self.importer.run()
- self.assertEqual(self.lib.items().get().title, 'Applied Title 1')
+ self.assertEqual(self.lib.items().get().title, "Applied Title 1")
self.assertEqual(self.lib.albums().get(), None)
def test_apply_tracks_adds_singleton_path(self):
@@ -886,12 +898,12 @@ def test_apply_tracks_adds_singleton_path(self):
self.importer.add_choice(importer.action.APPLY)
self.importer.add_choice(importer.action.APPLY)
self.importer.run()
- self.assert_file_in_lib(b'singletons', b'Applied Title 1.mp3')
+ self.assert_file_in_lib(b"singletons", b"Applied Title 1.mp3")
class ImportCompilationTest(_common.TestCase, ImportHelper):
- """Test ASIS import of a folder containing tracks with different artists.
- """
+ """Test ASIS import of a folder containing tracks with different artists."""
+
def setUp(self):
self.setup_beets()
self._create_import_dir(3)
@@ -905,27 +917,26 @@ def tearDown(self):
def test_asis_homogenous_sets_albumartist(self):
self.importer.add_choice(importer.action.ASIS)
self.importer.run()
- self.assertEqual(self.lib.albums().get().albumartist, 'Tag Artist')
+ self.assertEqual(self.lib.albums().get().albumartist, "Tag Artist")
for item in self.lib.items():
- self.assertEqual(item.albumartist, 'Tag Artist')
+ self.assertEqual(item.albumartist, "Tag Artist")
def test_asis_heterogenous_sets_various_albumartist(self):
- self.import_media[0].artist = 'Other Artist'
+ self.import_media[0].artist = "Other Artist"
self.import_media[0].save()
- self.import_media[1].artist = 'Another Artist'
+ self.import_media[1].artist = "Another Artist"
self.import_media[1].save()
self.importer.add_choice(importer.action.ASIS)
self.importer.run()
- self.assertEqual(self.lib.albums().get().albumartist,
- 'Various Artists')
+ self.assertEqual(self.lib.albums().get().albumartist, "Various Artists")
for item in self.lib.items():
- self.assertEqual(item.albumartist, 'Various Artists')
+ self.assertEqual(item.albumartist, "Various Artists")
def test_asis_heterogenous_sets_compilation(self):
- self.import_media[0].artist = 'Other Artist'
+ self.import_media[0].artist = "Other Artist"
self.import_media[0].save()
- self.import_media[1].artist = 'Another Artist'
+ self.import_media[1].artist = "Another Artist"
self.import_media[1].save()
self.importer.add_choice(importer.action.ASIS)
@@ -934,85 +945,84 @@ def test_asis_heterogenous_sets_compilation(self):
self.assertTrue(item.comp)
def test_asis_sets_majority_albumartist(self):
- self.import_media[0].artist = 'Other Artist'
+ self.import_media[0].artist = "Other Artist"
self.import_media[0].save()
- self.import_media[1].artist = 'Other Artist'
+ self.import_media[1].artist = "Other Artist"
self.import_media[1].save()
self.importer.add_choice(importer.action.ASIS)
self.importer.run()
- self.assertEqual(self.lib.albums().get().albumartist, 'Other Artist')
+ self.assertEqual(self.lib.albums().get().albumartist, "Other Artist")
for item in self.lib.items():
- self.assertEqual(item.albumartist, 'Other Artist')
+ self.assertEqual(item.albumartist, "Other Artist")
def test_asis_albumartist_tag_sets_albumartist(self):
- self.import_media[0].artist = 'Other Artist'
- self.import_media[1].artist = 'Another Artist'
+ self.import_media[0].artist = "Other Artist"
+ self.import_media[1].artist = "Another Artist"
for mediafile in self.import_media:
- mediafile.albumartist = 'Album Artist'
- mediafile.mb_albumartistid = 'Album Artist ID'
+ mediafile.albumartist = "Album Artist"
+ mediafile.mb_albumartistid = "Album Artist ID"
mediafile.save()
self.importer.add_choice(importer.action.ASIS)
self.importer.run()
- self.assertEqual(self.lib.albums().get().albumartist, 'Album Artist')
- self.assertEqual(self.lib.albums().get().mb_albumartistid,
- 'Album Artist ID')
+ self.assertEqual(self.lib.albums().get().albumartist, "Album Artist")
+ self.assertEqual(
+ self.lib.albums().get().mb_albumartistid, "Album Artist ID"
+ )
for item in self.lib.items():
- self.assertEqual(item.albumartist, 'Album Artist')
- self.assertEqual(item.mb_albumartistid, 'Album Artist ID')
+ self.assertEqual(item.albumartist, "Album Artist")
+ self.assertEqual(item.mb_albumartistid, "Album Artist ID")
def test_asis_albumartists_tag_sets_multi_albumartists(self):
- self.import_media[0].artist = 'Other Artist'
- self.import_media[0].artists = ['Other Artist', 'Other Artist 2']
- self.import_media[1].artist = 'Another Artist'
- self.import_media[1].artists = ['Another Artist', 'Another Artist 2']
+ self.import_media[0].artist = "Other Artist"
+ self.import_media[0].artists = ["Other Artist", "Other Artist 2"]
+ self.import_media[1].artist = "Another Artist"
+ self.import_media[1].artists = ["Another Artist", "Another Artist 2"]
for mediafile in self.import_media:
- mediafile.albumartist = 'Album Artist'
- mediafile.albumartists = ['Album Artist 1', 'Album Artist 2']
- mediafile.mb_albumartistid = 'Album Artist ID'
+ mediafile.albumartist = "Album Artist"
+ mediafile.albumartists = ["Album Artist 1", "Album Artist 2"]
+ mediafile.mb_albumartistid = "Album Artist ID"
mediafile.save()
self.importer.add_choice(importer.action.ASIS)
self.importer.run()
- self.assertEqual(self.lib.albums().get().albumartist, 'Album Artist')
+ self.assertEqual(self.lib.albums().get().albumartist, "Album Artist")
self.assertEqual(
self.lib.albums().get().albumartists,
- ['Album Artist 1', 'Album Artist 2']
+ ["Album Artist 1", "Album Artist 2"],
+ )
+ self.assertEqual(
+ self.lib.albums().get().mb_albumartistid, "Album Artist ID"
)
- self.assertEqual(self.lib.albums().get().mb_albumartistid,
- 'Album Artist ID')
# Make sure both custom media items get tested
asserted_multi_artists_0 = False
asserted_multi_artists_1 = False
for item in self.lib.items():
- self.assertEqual(item.albumartist, 'Album Artist')
+ self.assertEqual(item.albumartist, "Album Artist")
self.assertEqual(
- item.albumartists,
- ['Album Artist 1', 'Album Artist 2']
+ item.albumartists, ["Album Artist 1", "Album Artist 2"]
)
- self.assertEqual(item.mb_albumartistid, 'Album Artist ID')
+ self.assertEqual(item.mb_albumartistid, "Album Artist ID")
if item.artist == "Other Artist":
asserted_multi_artists_0 = True
self.assertEqual(
- item.artists,
- ['Other Artist', 'Other Artist 2']
+ item.artists, ["Other Artist", "Other Artist 2"]
)
if item.artist == "Another Artist":
asserted_multi_artists_1 = True
self.assertEqual(
- item.artists,
- ['Another Artist', 'Another Artist 2']
+ item.artists, ["Another Artist", "Another Artist 2"]
)
self.assertTrue(asserted_multi_artists_0 and asserted_multi_artists_1)
class ImportExistingTest(_common.TestCase, ImportHelper):
- """Test importing files that are already in the library directory.
- """
+ """Test importing files that are already in the library directory."""
+
def setUp(self):
self.setup_beets()
self._create_import_dir(1)
@@ -1058,68 +1068,76 @@ def test_does_not_duplicate_singleton_track(self):
def test_asis_updates_metadata(self):
self.setup_importer.run()
medium = MediaFile(self.lib.items().get().path)
- medium.title = 'New Title'
+ medium.title = "New Title"
medium.save()
self.importer.add_choice(importer.action.ASIS)
self.importer.run()
- self.assertEqual(self.lib.items().get().title, 'New Title')
+ self.assertEqual(self.lib.items().get().title, "New Title")
def test_asis_updated_moves_file(self):
self.setup_importer.run()
medium = MediaFile(self.lib.items().get().path)
- medium.title = 'New Title'
+ medium.title = "New Title"
medium.save()
- old_path = os.path.join(b'Applied Artist', b'Applied Album',
- b'Applied Title 1.mp3')
+ old_path = os.path.join(
+ b"Applied Artist", b"Applied Album", b"Applied Title 1.mp3"
+ )
self.assert_file_in_lib(old_path)
self.importer.add_choice(importer.action.ASIS)
self.importer.run()
- self.assert_file_in_lib(b'Applied Artist', b'Applied Album',
- b'New Title.mp3')
+ self.assert_file_in_lib(
+ b"Applied Artist", b"Applied Album", b"New Title.mp3"
+ )
self.assert_file_not_in_lib(old_path)
def test_asis_updated_without_copy_does_not_move_file(self):
self.setup_importer.run()
medium = MediaFile(self.lib.items().get().path)
- medium.title = 'New Title'
+ medium.title = "New Title"
medium.save()
- old_path = os.path.join(b'Applied Artist', b'Applied Album',
- b'Applied Title 1.mp3')
+ old_path = os.path.join(
+ b"Applied Artist", b"Applied Album", b"Applied Title 1.mp3"
+ )
self.assert_file_in_lib(old_path)
- config['import']['copy'] = False
+ config["import"]["copy"] = False
self.importer.add_choice(importer.action.ASIS)
self.importer.run()
- self.assert_file_not_in_lib(b'Applied Artist', b'Applied Album',
- b'New Title.mp3')
+ self.assert_file_not_in_lib(
+ b"Applied Artist", b"Applied Album", b"New Title.mp3"
+ )
self.assert_file_in_lib(old_path)
def test_outside_file_is_copied(self):
- config['import']['copy'] = False
+ config["import"]["copy"] = False
self.setup_importer.run()
- self.assert_equal_path(self.lib.items().get().path,
- self.import_media[0].path)
+ self.assert_equal_path(
+ self.lib.items().get().path, self.import_media[0].path
+ )
- config['import']['copy'] = True
+ config["import"]["copy"] = True
self._setup_import_session()
self.importer.add_choice(importer.action.APPLY)
self.importer.run()
- new_path = os.path.join(b'Applied Artist', b'Applied Album',
- b'Applied Title 1.mp3')
+ new_path = os.path.join(
+ b"Applied Artist", b"Applied Album", b"Applied Title 1.mp3"
+ )
self.assert_file_in_lib(new_path)
- self.assert_equal_path(self.lib.items().get().path,
- os.path.join(self.libdir, new_path))
+ self.assert_equal_path(
+ self.lib.items().get().path, os.path.join(self.libdir, new_path)
+ )
def test_outside_file_is_moved(self):
- config['import']['copy'] = False
+ config["import"]["copy"] = False
self.setup_importer.run()
- self.assert_equal_path(self.lib.items().get().path,
- self.import_media[0].path)
+ self.assert_equal_path(
+ self.lib.items().get().path, self.import_media[0].path
+ )
self._setup_import_session(move=True)
self.importer.add_choice(importer.action.APPLY)
@@ -1151,7 +1169,7 @@ def test_add_album_for_different_artist_and_different_album(self):
self.importer.run()
albums = {album.album for album in self.lib.albums()}
- self.assertEqual(albums, {'Album B', 'Tag Album'})
+ self.assertEqual(albums, {"Album B", "Tag Album"})
def test_add_album_for_different_artist_and_same_albumartist(self):
self.import_media[0].artist = "Artist B"
@@ -1163,7 +1181,7 @@ def test_add_album_for_different_artist_and_same_albumartist(self):
self.importer.run()
artists = {album.albumartist for album in self.lib.albums()}
- self.assertEqual(artists, {'Album Artist', 'Tag Artist'})
+ self.assertEqual(artists, {"Album Artist", "Tag Artist"})
def test_add_album_for_same_artist_and_different_album(self):
self.import_media[0].album = "Album B"
@@ -1171,7 +1189,7 @@ def test_add_album_for_same_artist_and_different_album(self):
self.importer.run()
albums = {album.album for album in self.lib.albums()}
- self.assertEqual(albums, {'Album B', 'Tag Album'})
+ self.assertEqual(albums, {"Album B", "Tag Album"})
def test_add_album_for_same_album_and_different_artist(self):
self.import_media[0].artist = "Artist B"
@@ -1179,25 +1197,24 @@ def test_add_album_for_same_album_and_different_artist(self):
self.importer.run()
artists = {album.albumartist for album in self.lib.albums()}
- self.assertEqual(artists, {'Artist B', 'Tag Artist'})
+ self.assertEqual(artists, {"Artist B", "Tag Artist"})
def test_incremental(self):
- config['import']['incremental'] = True
+ config["import"]["incremental"] = True
self.import_media[0].album = "Album B"
self.import_media[0].save()
self.importer.run()
albums = {album.album for album in self.lib.albums()}
- self.assertEqual(albums, {'Album B', 'Tag Album'})
+ self.assertEqual(albums, {"Album B", "Tag Album"})
class GlobalGroupAlbumsImportTest(GroupAlbumsImportTest):
-
def setUp(self):
super().setUp()
self.importer.clear_choices()
self.importer.default_choice = importer.action.ASIS
- config['import']['group_albums'] = True
+ config["import"]["group_albums"] = True
class ChooseCandidateTest(_common.TestCase, ImportHelper):
@@ -1215,12 +1232,12 @@ def tearDown(self):
def test_choose_first_candidate(self):
self.importer.add_choice(1)
self.importer.run()
- self.assertEqual(self.lib.albums().get().album, 'Applied Album M')
+ self.assertEqual(self.lib.albums().get().album, "Applied Album M")
def test_choose_second_candidate(self):
self.importer.add_choice(2)
self.importer.run()
- self.assertEqual(self.lib.albums().get().album, 'Applied Album MM')
+ self.assertEqual(self.lib.albums().get().album, "Applied Album MM")
class InferAlbumDataTest(_common.TestCase):
@@ -1230,16 +1247,17 @@ def setUp(self):
i1 = _common.item()
i2 = _common.item()
i3 = _common.item()
- i1.title = 'first item'
- i2.title = 'second item'
- i3.title = 'third item'
+ i1.title = "first item"
+ i2.title = "second item"
+ i3.title = "third item"
i1.comp = i2.comp = i3.comp = False
- i1.albumartist = i2.albumartist = i3.albumartist = ''
- i1.mb_albumartistid = i2.mb_albumartistid = i3.mb_albumartistid = ''
+ i1.albumartist = i2.albumartist = i3.albumartist = ""
+ i1.mb_albumartistid = i2.mb_albumartistid = i3.mb_albumartistid = ""
self.items = [i1, i2, i3]
- self.task = importer.ImportTask(paths=['a path'], toppath='top path',
- items=self.items)
+ self.task = importer.ImportTask(
+ paths=["a path"], toppath="top path", items=self.items
+ )
def test_asis_homogenous_single_artist(self):
self.task.set_choice(importer.action.ASIS)
@@ -1248,28 +1266,28 @@ def test_asis_homogenous_single_artist(self):
self.assertEqual(self.items[0].albumartist, self.items[2].artist)
def test_asis_heterogenous_va(self):
- self.items[0].artist = 'another artist'
- self.items[1].artist = 'some other artist'
+ self.items[0].artist = "another artist"
+ self.items[1].artist = "some other artist"
self.task.set_choice(importer.action.ASIS)
self.task.align_album_level_fields()
self.assertTrue(self.items[0].comp)
- self.assertEqual(self.items[0].albumartist, 'Various Artists')
+ self.assertEqual(self.items[0].albumartist, "Various Artists")
def test_asis_comp_applied_to_all_items(self):
- self.items[0].artist = 'another artist'
- self.items[1].artist = 'some other artist'
+ self.items[0].artist = "another artist"
+ self.items[1].artist = "some other artist"
self.task.set_choice(importer.action.ASIS)
self.task.align_album_level_fields()
for item in self.items:
self.assertTrue(item.comp)
- self.assertEqual(item.albumartist, 'Various Artists')
+ self.assertEqual(item.albumartist, "Various Artists")
def test_asis_majority_artist_single_artist(self):
- self.items[0].artist = 'another artist'
+ self.items[0].artist = "another artist"
self.task.set_choice(importer.action.ASIS)
self.task.align_album_level_fields()
@@ -1278,19 +1296,17 @@ def test_asis_majority_artist_single_artist(self):
self.assertEqual(self.items[0].albumartist, self.items[2].artist)
def test_asis_track_albumartist_override(self):
- self.items[0].artist = 'another artist'
- self.items[1].artist = 'some other artist'
+ self.items[0].artist = "another artist"
+ self.items[1].artist = "some other artist"
for item in self.items:
- item.albumartist = 'some album artist'
- item.mb_albumartistid = 'some album artist id'
+ item.albumartist = "some album artist"
+ item.mb_albumartistid = "some album artist id"
self.task.set_choice(importer.action.ASIS)
self.task.align_album_level_fields()
- self.assertEqual(self.items[0].albumartist,
- 'some album artist')
- self.assertEqual(self.items[0].mb_albumartistid,
- 'some album artist id')
+ self.assertEqual(self.items[0].albumartist, "some album artist")
+ self.assertEqual(self.items[0].mb_albumartistid, "some album artist id")
def test_apply_gets_artist_and_id(self):
self.task.set_choice(AlbumMatch(0, None, {}, set(), set())) # APPLY
@@ -1298,21 +1314,20 @@ def test_apply_gets_artist_and_id(self):
self.task.align_album_level_fields()
self.assertEqual(self.items[0].albumartist, self.items[0].artist)
- self.assertEqual(self.items[0].mb_albumartistid,
- self.items[0].mb_artistid)
+ self.assertEqual(
+ self.items[0].mb_albumartistid, self.items[0].mb_artistid
+ )
def test_apply_lets_album_values_override(self):
for item in self.items:
- item.albumartist = 'some album artist'
- item.mb_albumartistid = 'some album artist id'
+ item.albumartist = "some album artist"
+ item.mb_albumartistid = "some album artist id"
self.task.set_choice(AlbumMatch(0, None, {}, set(), set())) # APPLY
self.task.align_album_level_fields()
- self.assertEqual(self.items[0].albumartist,
- 'some album artist')
- self.assertEqual(self.items[0].mb_albumartistid,
- 'some album artist id')
+ self.assertEqual(self.items[0].albumartist, "some album artist")
+ self.assertEqual(self.items[0].mb_albumartistid, "some album artist id")
def test_small_single_artist_album(self):
self.items = [self.items[0]]
@@ -1323,45 +1338,44 @@ def test_small_single_artist_album(self):
def match_album_mock(*args, **kwargs):
- """Create an AlbumInfo object for testing.
- """
+ """Create an AlbumInfo object for testing."""
track_info = TrackInfo(
- title='new title',
- track_id='trackid',
+ title="new title",
+ track_id="trackid",
index=0,
)
album_info = AlbumInfo(
- artist='artist',
- album='album',
+ artist="artist",
+ album="album",
tracks=[track_info],
- album_id='albumid',
- artist_id='artistid',
- flex='flex',
+ album_id="albumid",
+ artist_id="artistid",
+ flex="flex",
)
return iter([album_info])
-@patch('beets.autotag.mb.match_album', Mock(side_effect=match_album_mock))
-class ImportDuplicateAlbumTest(unittest.TestCase, TestHelper,
- _common.Assertions):
-
+@patch("beets.autotag.mb.match_album", Mock(side_effect=match_album_mock))
+class ImportDuplicateAlbumTest(
+ unittest.TestCase, TestHelper, _common.Assertions
+):
def setUp(self):
self.setup_beets()
# Original album
- self.add_album_fixture(albumartist='artist', album='album')
+ self.add_album_fixture(albumartist="artist", album="album")
# Create import session
self.importer = self.create_importer()
- config['import']['autotag'] = True
- config['import']['duplicate_keys']['album'] = 'albumartist album'
+ config["import"]["autotag"] = True
+ config["import"]["duplicate_keys"]["album"] = "albumartist album"
def tearDown(self):
self.teardown_beets()
def test_remove_duplicate_album(self):
item = self.lib.items().get()
- self.assertEqual(item.title, 't\xeftle 0')
+ self.assertEqual(item.title, "t\xeftle 0")
self.assertExists(item.path)
self.importer.default_resolution = self.importer.Resolution.REMOVE
@@ -1371,23 +1385,24 @@ def test_remove_duplicate_album(self):
self.assertEqual(len(self.lib.albums()), 1)
self.assertEqual(len(self.lib.items()), 1)
item = self.lib.items().get()
- self.assertEqual(item.title, 'new title')
+ self.assertEqual(item.title, "new title")
def test_no_autotag_keeps_duplicate_album(self):
- config['import']['autotag'] = False
+ config["import"]["autotag"] = False
item = self.lib.items().get()
- self.assertEqual(item.title, 't\xeftle 0')
+ self.assertEqual(item.title, "t\xeftle 0")
self.assertExists(item.path)
# Imported item has the same artist and album as the one in the
# library.
- import_file = os.path.join(self.importer.paths[0],
- b'album 0', b'track 0.mp3')
+ import_file = os.path.join(
+ self.importer.paths[0], b"album 0", b"track 0.mp3"
+ )
import_file = MediaFile(import_file)
- import_file.artist = item['artist']
- import_file.albumartist = item['artist']
- import_file.album = item['album']
- import_file.title = 'new title'
+ import_file.artist = item["artist"]
+ import_file.albumartist = item["artist"]
+ import_file.album = item["album"]
+ import_file.title = "new title"
self.importer.default_resolution = self.importer.Resolution.REMOVE
self.importer.run()
@@ -1405,7 +1420,7 @@ def test_keep_duplicate_album(self):
def test_skip_duplicate_album(self):
item = self.lib.items().get()
- self.assertEqual(item.title, 't\xeftle 0')
+ self.assertEqual(item.title, "t\xeftle 0")
self.importer.default_resolution = self.importer.Resolution.SKIP
self.importer.run()
@@ -1413,7 +1428,7 @@ def test_skip_duplicate_album(self):
self.assertEqual(len(self.lib.albums()), 1)
self.assertEqual(len(self.lib.items()), 1)
item = self.lib.items().get()
- self.assertEqual(item.title, 't\xeftle 0')
+ self.assertEqual(item.title, "t\xeftle 0")
def test_merge_duplicate_album(self):
self.importer.default_resolution = self.importer.Resolution.MERGE
@@ -1422,19 +1437,20 @@ def test_merge_duplicate_album(self):
self.assertEqual(len(self.lib.albums()), 1)
def test_twice_in_import_dir(self):
- self.skipTest('write me')
+ self.skipTest("write me")
def test_keep_when_extra_key_is_different(self):
- config['import']['duplicate_keys']['album'] = 'albumartist album flex'
+ config["import"]["duplicate_keys"]["album"] = "albumartist album flex"
item = self.lib.items().get()
- import_file = MediaFile(os.path.join(
- self.importer.paths[0], b'album 0', b'track 0.mp3'))
- import_file.artist = item['artist']
- import_file.albumartist = item['artist']
- import_file.album = item['album']
- import_file.title = item['title']
- import_file.flex = 'different'
+ import_file = MediaFile(
+ os.path.join(self.importer.paths[0], b"album 0", b"track 0.mp3")
+ )
+ import_file.artist = item["artist"]
+ import_file.albumartist = item["artist"]
+ import_file.album = item["album"]
+ import_file.title = item["title"]
+ import_file.flex = "different"
self.importer.default_resolution = self.importer.Resolution.SKIP
self.importer.run()
@@ -1451,34 +1467,42 @@ def add_album_fixture(self, **kwargs):
def match_track_mock(*args, **kwargs):
- return iter([TrackInfo(
- artist='artist', title='title',
- track_id='new trackid', index=0,)])
-
+ return iter(
+ [
+ TrackInfo(
+ artist="artist",
+ title="title",
+ track_id="new trackid",
+ index=0,
+ )
+ ]
+ )
-@patch('beets.autotag.mb.match_track', Mock(side_effect=match_track_mock))
-class ImportDuplicateSingletonTest(unittest.TestCase, TestHelper,
- _common.Assertions):
+@patch("beets.autotag.mb.match_track", Mock(side_effect=match_track_mock))
+class ImportDuplicateSingletonTest(
+ unittest.TestCase, TestHelper, _common.Assertions
+):
def setUp(self):
self.setup_beets()
# Original file in library
- self.add_item_fixture(artist='artist', title='title',
- mb_trackid='old trackid')
+ self.add_item_fixture(
+ artist="artist", title="title", mb_trackid="old trackid"
+ )
# Import session
self.importer = self.create_importer()
- config['import']['autotag'] = True
- config['import']['singletons'] = True
- config['import']['duplicate_keys']['item'] = 'artist title'
+ config["import"]["autotag"] = True
+ config["import"]["singletons"] = True
+ config["import"]["duplicate_keys"]["item"] = "artist title"
def tearDown(self):
self.teardown_beets()
def test_remove_duplicate(self):
item = self.lib.items().get()
- self.assertEqual(item.mb_trackid, 'old trackid')
+ self.assertEqual(item.mb_trackid, "old trackid")
self.assertExists(item.path)
self.importer.default_resolution = self.importer.Resolution.REMOVE
@@ -1487,7 +1511,7 @@ def test_remove_duplicate(self):
self.assertNotExists(item.path)
self.assertEqual(len(self.lib.items()), 1)
item = self.lib.items().get()
- self.assertEqual(item.mb_trackid, 'new trackid')
+ self.assertEqual(item.mb_trackid, "new trackid")
def test_keep_duplicate(self):
self.assertEqual(len(self.lib.items()), 1)
@@ -1499,19 +1523,19 @@ def test_keep_duplicate(self):
def test_skip_duplicate(self):
item = self.lib.items().get()
- self.assertEqual(item.mb_trackid, 'old trackid')
+ self.assertEqual(item.mb_trackid, "old trackid")
self.importer.default_resolution = self.importer.Resolution.SKIP
self.importer.run()
self.assertEqual(len(self.lib.items()), 1)
item = self.lib.items().get()
- self.assertEqual(item.mb_trackid, 'old trackid')
+ self.assertEqual(item.mb_trackid, "old trackid")
def test_keep_when_extra_key_is_different(self):
- config['import']['duplicate_keys']['item'] = 'artist title flex'
+ config["import"]["duplicate_keys"]["item"] = "artist title flex"
item = self.lib.items().get()
- item.flex = 'different'
+ item.flex = "different"
item.store()
self.assertEqual(len(self.lib.items()), 1)
@@ -1521,7 +1545,7 @@ def test_keep_when_extra_key_is_different(self):
self.assertEqual(len(self.lib.items()), 2)
def test_twice_in_import_dir(self):
- self.skipTest('write me')
+ self.skipTest("write me")
def add_item_fixture(self, **kwargs):
# Move this to TestHelper
@@ -1536,72 +1560,72 @@ def test_tag_log_line(self):
sio = StringIO()
handler = logging.StreamHandler(sio)
session = _common.import_session(loghandler=handler)
- session.tag_log('status', 'path')
- self.assertIn('status path', sio.getvalue())
+ session.tag_log("status", "path")
+ self.assertIn("status path", sio.getvalue())
def test_tag_log_unicode(self):
sio = StringIO()
handler = logging.StreamHandler(sio)
session = _common.import_session(loghandler=handler)
- session.tag_log('status', 'caf\xe9') # send unicode
- self.assertIn('status caf\xe9', sio.getvalue())
+ session.tag_log("status", "caf\xe9") # send unicode
+ self.assertIn("status caf\xe9", sio.getvalue())
class ResumeImportTest(unittest.TestCase, TestHelper):
-
def setUp(self):
self.setup_beets()
def tearDown(self):
self.teardown_beets()
- @patch('beets.plugins.send')
+ @patch("beets.plugins.send")
def test_resume_album(self, plugins_send):
self.importer = self.create_importer(album_count=2)
- self.config['import']['resume'] = True
+ self.config["import"]["resume"] = True
# Aborts import after one album. This also ensures that we skip
# the first album in the second try.
def raise_exception(event, **kwargs):
- if event == 'album_imported':
+ if event == "album_imported":
raise importer.ImportAbort
+
plugins_send.side_effect = raise_exception
self.importer.run()
self.assertEqual(len(self.lib.albums()), 1)
- self.assertIsNotNone(self.lib.albums('album:album 0').get())
+ self.assertIsNotNone(self.lib.albums("album:album 0").get())
self.importer.run()
self.assertEqual(len(self.lib.albums()), 2)
- self.assertIsNotNone(self.lib.albums('album:album 1').get())
+ self.assertIsNotNone(self.lib.albums("album:album 1").get())
- @patch('beets.plugins.send')
+ @patch("beets.plugins.send")
def test_resume_singleton(self, plugins_send):
self.importer = self.create_importer(item_count=2)
- self.config['import']['resume'] = True
- self.config['import']['singletons'] = True
+ self.config["import"]["resume"] = True
+ self.config["import"]["singletons"] = True
# Aborts import after one track. This also ensures that we skip
# the first album in the second try.
def raise_exception(event, **kwargs):
- if event == 'item_imported':
+ if event == "item_imported":
raise importer.ImportAbort
+
plugins_send.side_effect = raise_exception
self.importer.run()
self.assertEqual(len(self.lib.items()), 1)
- self.assertIsNotNone(self.lib.items('title:track 0').get())
+ self.assertIsNotNone(self.lib.items("title:track 0").get())
self.importer.run()
self.assertEqual(len(self.lib.items()), 2)
- self.assertIsNotNone(self.lib.items('title:track 1').get())
+ self.assertIsNotNone(self.lib.items("title:track 1").get())
class IncrementalImportTest(unittest.TestCase, TestHelper):
-
def setUp(self):
self.setup_beets()
- self.config['import']['incremental'] = True
+ self.config["import"]["incremental"] = True
def tearDown(self):
self.teardown_beets()
@@ -1613,7 +1637,7 @@ def test_incremental_album(self):
# Change album name so the original file would be imported again
# if incremental was off.
album = self.lib.albums().get()
- album['album'] = 'edited album'
+ album["album"] = "edited album"
album.store()
importer = self.create_importer(album_count=1)
@@ -1621,14 +1645,14 @@ def test_incremental_album(self):
self.assertEqual(len(self.lib.albums()), 2)
def test_incremental_item(self):
- self.config['import']['singletons'] = True
+ self.config["import"]["singletons"] = True
importer = self.create_importer(item_count=1)
importer.run()
# Change track name so the original file would be imported again
# if incremental was off.
item = self.lib.items().get()
- item['artist'] = 'edited artist'
+ item["artist"] = "edited artist"
item.store()
importer = self.create_importer(item_count=1)
@@ -1637,15 +1661,15 @@ def test_incremental_item(self):
def test_invalid_state_file(self):
importer = self.create_importer()
- with open(self.config['statefile'].as_filename(), 'wb') as f:
- f.write(b'000')
+ with open(self.config["statefile"].as_filename(), "wb") as f:
+ f.write(b"000")
importer.run()
self.assertEqual(len(self.lib.albums()), 1)
def _mkmp3(path):
shutil.copyfile(
- syspath(os.path.join(_common.RSRC, b'min.mp3')),
+ syspath(os.path.join(_common.RSRC, b"min.mp3")),
syspath(path),
)
@@ -1655,20 +1679,20 @@ def setUp(self):
super().setUp()
# create a directory structure for testing
- self.base = os.path.abspath(os.path.join(self.temp_dir, b'tempdir'))
+ self.base = os.path.abspath(os.path.join(self.temp_dir, b"tempdir"))
os.mkdir(syspath(self.base))
- os.mkdir(syspath(os.path.join(self.base, b'album1')))
- os.mkdir(syspath(os.path.join(self.base, b'album2')))
- os.mkdir(syspath(os.path.join(self.base, b'more')))
- os.mkdir(syspath(os.path.join(self.base, b'more', b'album3')))
- os.mkdir(syspath(os.path.join(self.base, b'more', b'album4')))
+ os.mkdir(syspath(os.path.join(self.base, b"album1")))
+ os.mkdir(syspath(os.path.join(self.base, b"album2")))
+ os.mkdir(syspath(os.path.join(self.base, b"more")))
+ os.mkdir(syspath(os.path.join(self.base, b"more", b"album3")))
+ os.mkdir(syspath(os.path.join(self.base, b"more", b"album4")))
- _mkmp3(os.path.join(self.base, b'album1', b'album1song1.mp3'))
- _mkmp3(os.path.join(self.base, b'album1', b'album1song2.mp3'))
- _mkmp3(os.path.join(self.base, b'album2', b'album2song.mp3'))
- _mkmp3(os.path.join(self.base, b'more', b'album3', b'album3song.mp3'))
- _mkmp3(os.path.join(self.base, b'more', b'album4', b'album4song.mp3'))
+ _mkmp3(os.path.join(self.base, b"album1", b"album1song1.mp3"))
+ _mkmp3(os.path.join(self.base, b"album1", b"album1song2.mp3"))
+ _mkmp3(os.path.join(self.base, b"album2", b"album2song.mp3"))
+ _mkmp3(os.path.join(self.base, b"more", b"album3", b"album3song.mp3"))
+ _mkmp3(os.path.join(self.base, b"more", b"album4", b"album4song.mp3"))
def test_finds_all_albums(self):
albums = list(albums_in_dir(self.base))
@@ -1677,16 +1701,16 @@ def test_finds_all_albums(self):
def test_separates_contents(self):
found = []
for _, album in albums_in_dir(self.base):
- found.append(re.search(br'album(.)song', album[0]).group(1))
- self.assertTrue(b'1' in found)
- self.assertTrue(b'2' in found)
- self.assertTrue(b'3' in found)
- self.assertTrue(b'4' in found)
+ found.append(re.search(rb"album(.)song", album[0]).group(1))
+ self.assertTrue(b"1" in found)
+ self.assertTrue(b"2" in found)
+ self.assertTrue(b"3" in found)
+ self.assertTrue(b"4" in found)
def test_finds_multiple_songs(self):
for _, album in albums_in_dir(self.base):
- n = re.search(br'album(.)song', album[0]).group(1)
- if n == b'1':
+ n = re.search(rb"album(.)song", album[0]).group(1)
+ if n == b"1":
self.assertEqual(len(album), 2)
else:
self.assertEqual(len(album), 1)
@@ -1700,47 +1724,47 @@ def create_music(self, files=True, ascii=True):
directories are made). `ascii` indicates ACII-only filenames;
otherwise, we use Unicode names.
"""
- self.base = os.path.abspath(os.path.join(self.temp_dir, b'tempdir'))
+ self.base = os.path.abspath(os.path.join(self.temp_dir, b"tempdir"))
os.mkdir(syspath(self.base))
- name = b'CAT' if ascii else util.bytestring_path('C\xc1T')
- name_alt_case = b'CAt' if ascii else util.bytestring_path('C\xc1t')
+ name = b"CAT" if ascii else util.bytestring_path("C\xc1T")
+ name_alt_case = b"CAt" if ascii else util.bytestring_path("C\xc1t")
self.dirs = [
# Nested album, multiple subdirs.
# Also, false positive marker in root dir, and subtitle for disc 3.
- os.path.join(self.base, b'ABCD1234'),
- os.path.join(self.base, b'ABCD1234', b'cd 1'),
- os.path.join(self.base, b'ABCD1234', b'cd 3 - bonus'),
-
+ os.path.join(self.base, b"ABCD1234"),
+ os.path.join(self.base, b"ABCD1234", b"cd 1"),
+ os.path.join(self.base, b"ABCD1234", b"cd 3 - bonus"),
# Nested album, single subdir.
# Also, punctuation between marker and disc number.
- os.path.join(self.base, b'album'),
- os.path.join(self.base, b'album', b'cd _ 1'),
-
+ os.path.join(self.base, b"album"),
+ os.path.join(self.base, b"album", b"cd _ 1"),
# Flattened album, case typo.
# Also, false positive marker in parent dir.
- os.path.join(self.base, b'artist [CD5]'),
- os.path.join(self.base, b'artist [CD5]', name + b' disc 1'),
- os.path.join(self.base, b'artist [CD5]',
- name_alt_case + b' disc 2'),
-
+ os.path.join(self.base, b"artist [CD5]"),
+ os.path.join(self.base, b"artist [CD5]", name + b" disc 1"),
+ os.path.join(
+ self.base, b"artist [CD5]", name_alt_case + b" disc 2"
+ ),
# Single disc album, sorted between CAT discs.
- os.path.join(self.base, b'artist [CD5]', name + b'S'),
+ os.path.join(self.base, b"artist [CD5]", name + b"S"),
]
self.files = [
- os.path.join(self.base, b'ABCD1234', b'cd 1', b'song1.mp3'),
- os.path.join(self.base, b'ABCD1234',
- b'cd 3 - bonus', b'song2.mp3'),
- os.path.join(self.base, b'ABCD1234',
- b'cd 3 - bonus', b'song3.mp3'),
- os.path.join(self.base, b'album', b'cd _ 1', b'song4.mp3'),
- os.path.join(self.base, b'artist [CD5]', name + b' disc 1',
- b'song5.mp3'),
- os.path.join(self.base, b'artist [CD5]',
- name_alt_case + b' disc 2', b'song6.mp3'),
- os.path.join(self.base, b'artist [CD5]', name + b'S',
- b'song7.mp3'),
+ os.path.join(self.base, b"ABCD1234", b"cd 1", b"song1.mp3"),
+ os.path.join(self.base, b"ABCD1234", b"cd 3 - bonus", b"song2.mp3"),
+ os.path.join(self.base, b"ABCD1234", b"cd 3 - bonus", b"song3.mp3"),
+ os.path.join(self.base, b"album", b"cd _ 1", b"song4.mp3"),
+ os.path.join(
+ self.base, b"artist [CD5]", name + b" disc 1", b"song5.mp3"
+ ),
+ os.path.join(
+ self.base,
+ b"artist [CD5]",
+ name_alt_case + b" disc 2",
+ b"song6.mp3",
+ ),
+ os.path.join(self.base, b"artist [CD5]", name + b"S", b"song7.mp3"),
]
if not ascii:
@@ -1757,10 +1781,10 @@ def _normalize_path(self, path):
"""Normalize a path's Unicode combining form according to the
platform.
"""
- path = path.decode('utf-8')
- norm_form = 'NFD' if sys.platform == 'darwin' else 'NFC'
+ path = path.decode("utf-8")
+ norm_form = "NFD" if sys.platform == "darwin" else "NFC"
path = unicodedata.normalize(norm_form, path)
- return path.encode('utf-8')
+ return path.encode("utf-8")
def test_coalesce_nested_album_multiple_subdirs(self):
self.create_music()
@@ -1827,11 +1851,11 @@ def setUp(self):
# The existing album.
album = self.add_album_fixture()
album.added = 4242.0
- album.foo = 'bar' # Some flexible attribute.
- album.data_source = 'original_source'
+ album.foo = "bar" # Some flexible attribute.
+ album.data_source = "original_source"
album.store()
item = album.items().get()
- item.baz = 'qux'
+ item.baz = "qux"
item.added = 4747.0
item.store()
@@ -1855,14 +1879,14 @@ def _item(self):
def test_reimported_album_gets_new_metadata(self):
self._setup_session()
- self.assertEqual(self._album().album, '\xe4lbum')
+ self.assertEqual(self._album().album, "\xe4lbum")
self.importer.run()
- self.assertEqual(self._album().album, 'the album')
+ self.assertEqual(self._album().album, "the album")
def test_reimported_album_preserves_flexattr(self):
self._setup_session()
self.importer.run()
- self.assertEqual(self._album().foo, 'bar')
+ self.assertEqual(self._album().foo, "bar")
def test_reimported_album_preserves_added(self):
self._setup_session()
@@ -1872,7 +1896,7 @@ def test_reimported_album_preserves_added(self):
def test_reimported_album_preserves_item_flexattr(self):
self._setup_session()
self.importer.run()
- self.assertEqual(self._item().baz, 'qux')
+ self.assertEqual(self._item().baz, "qux")
def test_reimported_album_preserves_item_added(self):
self._setup_session()
@@ -1881,14 +1905,14 @@ def test_reimported_album_preserves_item_added(self):
def test_reimported_item_gets_new_metadata(self):
self._setup_session(True)
- self.assertEqual(self._item().title, 't\xeftle 0')
+ self.assertEqual(self._item().title, "t\xeftle 0")
self.importer.run()
- self.assertEqual(self._item().title, 'full')
+ self.assertEqual(self._item().title, "full")
def test_reimported_item_preserves_flexattr(self):
self._setup_session(True)
self.importer.run()
- self.assertEqual(self._item().baz, 'qux')
+ self.assertEqual(self._item().baz, "qux")
def test_reimported_item_preserves_added(self):
self._setup_session(True)
@@ -1897,7 +1921,7 @@ def test_reimported_item_preserves_added(self):
def test_reimported_item_preserves_art(self):
self._setup_session()
- art_source = os.path.join(_common.RSRC, b'abbey.jpg')
+ art_source = os.path.join(_common.RSRC, b"abbey.jpg")
replaced_album = self._album()
replaced_album.set_art(art_source)
replaced_album.store()
@@ -1912,16 +1936,15 @@ def test_reimported_item_preserves_art(self):
def test_reimported_album_not_preserves_flexattr(self):
self._setup_session()
- self.assertEqual(self._album().data_source, 'original_source')
+ self.assertEqual(self._album().data_source, "original_source")
self.importer.run()
- self.assertEqual(self._album().data_source, 'match_source')
+ self.assertEqual(self._album().data_source, "match_source")
class ImportPretendTest(_common.TestCase, ImportHelper):
- """ Test the pretend commandline option
- """
+ """Test the pretend commandline option"""
- def __init__(self, method_name='runTest'):
+ def __init__(self, method_name="runTest"):
super().__init__(method_name)
self.matcher = None
@@ -1931,7 +1954,7 @@ def setUp(self):
self.__create_import_dir()
self.__create_empty_import_dir()
self._setup_import_session()
- config['import']['pretend'] = True
+ config["import"]["pretend"] = True
self.matcher = AutotagStub().install()
self.io.install()
@@ -1941,21 +1964,22 @@ def tearDown(self):
def __create_import_dir(self):
self._create_import_dir(1)
- resource_path = os.path.join(_common.RSRC, b'empty.mp3')
- single_path = os.path.join(self.import_dir, b'track_2.mp3')
+ resource_path = os.path.join(_common.RSRC, b"empty.mp3")
+ single_path = os.path.join(self.import_dir, b"track_2.mp3")
shutil.copy(syspath(resource_path), syspath(single_path))
self.import_paths = [
- os.path.join(self.import_dir, b'the_album'),
- single_path
+ os.path.join(self.import_dir, b"the_album"),
+ single_path,
]
self.import_files = [
displayable_path(
- os.path.join(self.import_paths[0], b'track_1.mp3')),
- displayable_path(single_path)
+ os.path.join(self.import_paths[0], b"track_1.mp3")
+ ),
+ displayable_path(single_path),
]
def __create_empty_import_dir(self):
- path = os.path.join(self.temp_dir, b'empty')
+ path = os.path.join(self.temp_dir, b"empty")
os.makedirs(syspath(path))
self.empty_path = path
@@ -1966,7 +1990,7 @@ def __run(self, import_paths, singletons=True):
with capture_log() as logs:
self.importer.run()
- logs = [line for line in logs if not line.startswith('Sending event:')]
+ logs = [line for line in logs if not line.startswith("Sending event:")]
self.assertEqual(len(self.lib.items()), 0)
self.assertEqual(len(self.lib.albums()), 0)
@@ -1976,111 +2000,149 @@ def __run(self, import_paths, singletons=True):
def test_import_singletons_pretend(self):
logs = self.__run(self.import_paths)
- self.assertEqual(logs, [
- 'Singleton: %s' % displayable_path(self.import_files[0]),
- 'Singleton: %s' % displayable_path(self.import_paths[1])])
+ self.assertEqual(
+ logs,
+ [
+ "Singleton: %s" % displayable_path(self.import_files[0]),
+ "Singleton: %s" % displayable_path(self.import_paths[1]),
+ ],
+ )
def test_import_album_pretend(self):
logs = self.__run(self.import_paths, singletons=False)
- self.assertEqual(logs, [
- 'Album: %s' % displayable_path(self.import_paths[0]),
- ' %s' % displayable_path(self.import_files[0]),
- 'Album: %s' % displayable_path(self.import_paths[1]),
- ' %s' % displayable_path(self.import_paths[1])])
+ self.assertEqual(
+ logs,
+ [
+ "Album: %s" % displayable_path(self.import_paths[0]),
+ " %s" % displayable_path(self.import_files[0]),
+ "Album: %s" % displayable_path(self.import_paths[1]),
+ " %s" % displayable_path(self.import_paths[1]),
+ ],
+ )
def test_import_pretend_empty(self):
logs = self.__run([self.empty_path])
- self.assertEqual(logs, ['No files imported from {}'
- .format(displayable_path(self.empty_path))])
+ self.assertEqual(
+ logs,
+ [
+ "No files imported from {}".format(
+ displayable_path(self.empty_path)
+ )
+ ],
+ )
+
# Helpers for ImportMusicBrainzIdTest.
-def mocked_get_release_by_id(id_, includes=[], release_status=[],
- release_type=[]):
+def mocked_get_release_by_id(
+ id_, includes=[], release_status=[], release_type=[]
+):
"""Mimic musicbrainzngs.get_release_by_id, accepting only a restricted list
of MB ids (ID_RELEASE_0, ID_RELEASE_1). The returned dict differs only in
the release title and artist name, so that ID_RELEASE_0 is a closer match
to the items created by ImportHelper._create_import_dir()."""
# Map IDs to (release title, artist), so the distances are different.
- releases = {ImportMusicBrainzIdTest.ID_RELEASE_0: ('VALID_RELEASE_0',
- 'TAG ARTIST'),
- ImportMusicBrainzIdTest.ID_RELEASE_1: ('VALID_RELEASE_1',
- 'DISTANT_MATCH')}
+ releases = {
+ ImportMusicBrainzIdTest.ID_RELEASE_0: ("VALID_RELEASE_0", "TAG ARTIST"),
+ ImportMusicBrainzIdTest.ID_RELEASE_1: (
+ "VALID_RELEASE_1",
+ "DISTANT_MATCH",
+ ),
+ }
return {
- 'release': {
- 'title': releases[id_][0],
- 'id': id_,
- 'medium-list': [{
- 'track-list': [{
- 'id': 'baz',
- 'recording': {
- 'title': 'foo',
- 'id': 'bar',
- 'length': 59,
+ "release": {
+ "title": releases[id_][0],
+ "id": id_,
+ "medium-list": [
+ {
+ "track-list": [
+ {
+ "id": "baz",
+ "recording": {
+ "title": "foo",
+ "id": "bar",
+ "length": 59,
+ },
+ "position": 9,
+ "number": "A2",
+ }
+ ],
+ "position": 5,
+ }
+ ],
+ "artist-credit": [
+ {
+ "artist": {
+ "name": releases[id_][1],
+ "id": "some-id",
},
- 'position': 9,
- 'number': 'A2'
- }],
- 'position': 5,
- }],
- 'artist-credit': [{
- 'artist': {
- 'name': releases[id_][1],
- 'id': 'some-id',
- },
- }],
- 'release-group': {
- 'id': 'another-id',
+ }
+ ],
+ "release-group": {
+ "id": "another-id",
},
- 'status': 'Official',
+ "status": "Official",
}
}
-def mocked_get_recording_by_id(id_, includes=[], release_status=[],
- release_type=[]):
+def mocked_get_recording_by_id(
+ id_, includes=[], release_status=[], release_type=[]
+):
"""Mimic musicbrainzngs.get_recording_by_id, accepting only a restricted
list of MB ids (ID_RECORDING_0, ID_RECORDING_1). The returned dict differs
only in the recording title and artist name, so that ID_RECORDING_0 is a
closer match to the items created by ImportHelper._create_import_dir()."""
# Map IDs to (recording title, artist), so the distances are different.
- releases = {ImportMusicBrainzIdTest.ID_RECORDING_0: ('VALID_RECORDING_0',
- 'TAG ARTIST'),
- ImportMusicBrainzIdTest.ID_RECORDING_1: ('VALID_RECORDING_1',
- 'DISTANT_MATCH')}
+ releases = {
+ ImportMusicBrainzIdTest.ID_RECORDING_0: (
+ "VALID_RECORDING_0",
+ "TAG ARTIST",
+ ),
+ ImportMusicBrainzIdTest.ID_RECORDING_1: (
+ "VALID_RECORDING_1",
+ "DISTANT_MATCH",
+ ),
+ }
return {
- 'recording': {
- 'title': releases[id_][0],
- 'id': id_,
- 'length': 59,
- 'artist-credit': [{
- 'artist': {
- 'name': releases[id_][1],
- 'id': 'some-id',
- },
- }],
+ "recording": {
+ "title": releases[id_][0],
+ "id": id_,
+ "length": 59,
+ "artist-credit": [
+ {
+ "artist": {
+ "name": releases[id_][1],
+ "id": "some-id",
+ },
+ }
+ ],
}
}
-@patch('musicbrainzngs.get_recording_by_id',
- Mock(side_effect=mocked_get_recording_by_id))
-@patch('musicbrainzngs.get_release_by_id',
- Mock(side_effect=mocked_get_release_by_id))
+@patch(
+ "musicbrainzngs.get_recording_by_id",
+ Mock(side_effect=mocked_get_recording_by_id),
+)
+@patch(
+ "musicbrainzngs.get_release_by_id",
+ Mock(side_effect=mocked_get_release_by_id),
+)
class ImportMusicBrainzIdTest(_common.TestCase, ImportHelper):
"""Test the --musicbrainzid argument."""
- MB_RELEASE_PREFIX = 'https://musicbrainz.org/release/'
- MB_RECORDING_PREFIX = 'https://musicbrainz.org/recording/'
- ID_RELEASE_0 = '00000000-0000-0000-0000-000000000000'
- ID_RELEASE_1 = '11111111-1111-1111-1111-111111111111'
- ID_RECORDING_0 = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
- ID_RECORDING_1 = 'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb'
+ MB_RELEASE_PREFIX = "https://musicbrainz.org/release/"
+ MB_RECORDING_PREFIX = "https://musicbrainz.org/recording/"
+ ID_RELEASE_0 = "00000000-0000-0000-0000-000000000000"
+ ID_RELEASE_1 = "11111111-1111-1111-1111-111111111111"
+ ID_RECORDING_0 = "aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa"
+ ID_RECORDING_1 = "bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb"
def setUp(self):
self.setup_beets()
@@ -2090,73 +2152,87 @@ def tearDown(self):
self.teardown_beets()
def test_one_mbid_one_album(self):
- self.config['import']['search_ids'] = \
- [self.MB_RELEASE_PREFIX + self.ID_RELEASE_0]
+ self.config["import"]["search_ids"] = [
+ self.MB_RELEASE_PREFIX + self.ID_RELEASE_0
+ ]
self._setup_import_session()
self.importer.add_choice(importer.action.APPLY)
self.importer.run()
- self.assertEqual(self.lib.albums().get().album, 'VALID_RELEASE_0')
+ self.assertEqual(self.lib.albums().get().album, "VALID_RELEASE_0")
def test_several_mbid_one_album(self):
- self.config['import']['search_ids'] = \
- [self.MB_RELEASE_PREFIX + self.ID_RELEASE_0,
- self.MB_RELEASE_PREFIX + self.ID_RELEASE_1]
+ self.config["import"]["search_ids"] = [
+ self.MB_RELEASE_PREFIX + self.ID_RELEASE_0,
+ self.MB_RELEASE_PREFIX + self.ID_RELEASE_1,
+ ]
self._setup_import_session()
self.importer.add_choice(2) # Pick the 2nd best match (release 1).
self.importer.add_choice(importer.action.APPLY)
self.importer.run()
- self.assertEqual(self.lib.albums().get().album, 'VALID_RELEASE_1')
+ self.assertEqual(self.lib.albums().get().album, "VALID_RELEASE_1")
def test_one_mbid_one_singleton(self):
- self.config['import']['search_ids'] = \
- [self.MB_RECORDING_PREFIX + self.ID_RECORDING_0]
+ self.config["import"]["search_ids"] = [
+ self.MB_RECORDING_PREFIX + self.ID_RECORDING_0
+ ]
self._setup_import_session(singletons=True)
self.importer.add_choice(importer.action.APPLY)
self.importer.run()
- self.assertEqual(self.lib.items().get().title, 'VALID_RECORDING_0')
+ self.assertEqual(self.lib.items().get().title, "VALID_RECORDING_0")
def test_several_mbid_one_singleton(self):
- self.config['import']['search_ids'] = \
- [self.MB_RECORDING_PREFIX + self.ID_RECORDING_0,
- self.MB_RECORDING_PREFIX + self.ID_RECORDING_1]
+ self.config["import"]["search_ids"] = [
+ self.MB_RECORDING_PREFIX + self.ID_RECORDING_0,
+ self.MB_RECORDING_PREFIX + self.ID_RECORDING_1,
+ ]
self._setup_import_session(singletons=True)
self.importer.add_choice(2) # Pick the 2nd best match (recording 1).
self.importer.add_choice(importer.action.APPLY)
self.importer.run()
- self.assertEqual(self.lib.items().get().title, 'VALID_RECORDING_1')
+ self.assertEqual(self.lib.items().get().title, "VALID_RECORDING_1")
def test_candidates_album(self):
"""Test directly ImportTask.lookup_candidates()."""
- task = importer.ImportTask(paths=self.import_dir,
- toppath='top path',
- items=[_common.item()])
- task.search_ids = [self.MB_RELEASE_PREFIX + self.ID_RELEASE_0,
- self.MB_RELEASE_PREFIX + self.ID_RELEASE_1,
- 'an invalid and discarded id']
+ task = importer.ImportTask(
+ paths=self.import_dir, toppath="top path", items=[_common.item()]
+ )
+ task.search_ids = [
+ self.MB_RELEASE_PREFIX + self.ID_RELEASE_0,
+ self.MB_RELEASE_PREFIX + self.ID_RELEASE_1,
+ "an invalid and discarded id",
+ ]
task.lookup_candidates()
- self.assertEqual({'VALID_RELEASE_0', 'VALID_RELEASE_1'},
- {c.info.album for c in task.candidates})
+ self.assertEqual(
+ {"VALID_RELEASE_0", "VALID_RELEASE_1"},
+ {c.info.album for c in task.candidates},
+ )
def test_candidates_singleton(self):
"""Test directly SingletonImportTask.lookup_candidates()."""
- task = importer.SingletonImportTask(toppath='top path',
- item=_common.item())
- task.search_ids = [self.MB_RECORDING_PREFIX + self.ID_RECORDING_0,
- self.MB_RECORDING_PREFIX + self.ID_RECORDING_1,
- 'an invalid and discarded id']
+ task = importer.SingletonImportTask(
+ toppath="top path", item=_common.item()
+ )
+ task.search_ids = [
+ self.MB_RECORDING_PREFIX + self.ID_RECORDING_0,
+ self.MB_RECORDING_PREFIX + self.ID_RECORDING_1,
+ "an invalid and discarded id",
+ ]
task.lookup_candidates()
- self.assertEqual({'VALID_RECORDING_0', 'VALID_RECORDING_1'},
- {c.info.title for c in task.candidates})
+ self.assertEqual(
+ {"VALID_RECORDING_0", "VALID_RECORDING_1"},
+ {c.info.title for c in task.candidates},
+ )
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/test_library.py b/test/test_library.py
index 651f846784..bf59bed222 100644
--- a/test/test_library.py
+++ b/test/test_library.py
@@ -17,25 +17,24 @@
import os
import os.path
-import stat
-import shutil
import re
-import unicodedata
+import shutil
+import stat
import sys
import time
+import unicodedata
import unittest
-
from test import _common
from test._common import item
-import beets.library
-import beets.dbcore.query
-from beets import util
-from beets import plugins
-from beets import config
-from mediafile import MediaFile, UnreadableFileError
-from beets.util import syspath, bytestring_path
from test.helper import TestHelper
+from mediafile import MediaFile, UnreadableFileError
+
+import beets.dbcore.query
+import beets.library
+from beets import config, plugins, util
+from beets.util import bytestring_path, syspath
+
# Shortcut to path normalization.
np = util.normpath
@@ -43,39 +42,43 @@
class LoadTest(_common.LibTestCase):
def test_load_restores_data_from_db(self):
original_title = self.i.title
- self.i.title = 'something'
+ self.i.title = "something"
self.i.load()
self.assertEqual(original_title, self.i.title)
def test_load_clears_dirty_flags(self):
- self.i.artist = 'something'
- self.assertTrue('artist' in self.i._dirty)
+ self.i.artist = "something"
+ self.assertTrue("artist" in self.i._dirty)
self.i.load()
- self.assertTrue('artist' not in self.i._dirty)
+ self.assertTrue("artist" not in self.i._dirty)
class StoreTest(_common.LibTestCase):
def test_store_changes_database_value(self):
self.i.year = 1987
self.i.store()
- new_year = self.lib._connection().execute(
- 'select year from items where '
- 'title="the title"').fetchone()['year']
+ new_year = (
+ self.lib._connection()
+ .execute("select year from items where " 'title="the title"')
+ .fetchone()["year"]
+ )
self.assertEqual(new_year, 1987)
def test_store_only_writes_dirty_fields(self):
original_genre = self.i.genre
- self.i._values_fixed['genre'] = 'beatboxing' # change w/o dirtying
+ self.i._values_fixed["genre"] = "beatboxing" # change w/o dirtying
self.i.store()
- new_genre = self.lib._connection().execute(
- 'select genre from items where '
- 'title="the title"').fetchone()['genre']
+ new_genre = (
+ self.lib._connection()
+ .execute("select genre from items where " 'title="the title"')
+ .fetchone()["genre"]
+ )
self.assertEqual(new_genre, original_genre)
def test_store_clears_dirty_flags(self):
- self.i.composer = 'tvp'
+ self.i.composer = "tvp"
self.i.store()
- self.assertTrue('composer' not in self.i._dirty)
+ self.assertTrue("composer" not in self.i._dirty)
def test_store_album_cascades_flex_deletes(self):
album = _common.album()
@@ -87,38 +90,46 @@ def test_store_album_cascades_flex_deletes(self):
self.lib.add(item)
del album.flex1
album.store()
- self.assertNotIn('flex1', album)
- self.assertNotIn('flex1', album.items()[0])
+ self.assertNotIn("flex1", album)
+ self.assertNotIn("flex1", album.items()[0])
class AddTest(_common.TestCase):
def setUp(self):
super().setUp()
- self.lib = beets.library.Library(':memory:')
+ self.lib = beets.library.Library(":memory:")
self.i = item()
def test_item_add_inserts_row(self):
self.lib.add(self.i)
- new_grouping = self.lib._connection().execute(
- 'select grouping from items '
- 'where composer="the composer"').fetchone()['grouping']
+ new_grouping = (
+ self.lib._connection()
+ .execute(
+ "select grouping from items " 'where composer="the composer"'
+ )
+ .fetchone()["grouping"]
+ )
self.assertEqual(new_grouping, self.i.grouping)
def test_library_add_path_inserts_row(self):
i = beets.library.Item.from_path(
- os.path.join(_common.RSRC, b'full.mp3')
+ os.path.join(_common.RSRC, b"full.mp3")
)
self.lib.add(i)
- new_grouping = self.lib._connection().execute(
- 'select grouping from items '
- 'where composer="the composer"').fetchone()['grouping']
+ new_grouping = (
+ self.lib._connection()
+ .execute(
+ "select grouping from items " 'where composer="the composer"'
+ )
+ .fetchone()["grouping"]
+ )
self.assertEqual(new_grouping, self.i.grouping)
class RemoveTest(_common.LibTestCase):
def test_remove_deletes_from_db(self):
self.i.remove()
- c = self.lib._connection().execute('select * from items')
+ c = self.lib._connection().execute("select * from items")
self.assertEqual(c.fetchone(), None)
@@ -133,29 +144,29 @@ def test_set_changes_value(self):
def test_set_sets_dirty_flag(self):
self.i.comp = not self.i.comp
- self.assertTrue('comp' in self.i._dirty)
+ self.assertTrue("comp" in self.i._dirty)
def test_set_does_not_dirty_if_value_unchanged(self):
self.i.title = self.i.title
- self.assertTrue('title' not in self.i._dirty)
+ self.assertTrue("title" not in self.i._dirty)
def test_invalid_field_raises_attributeerror(self):
- self.assertRaises(AttributeError, getattr, self.i, 'xyzzy')
+ self.assertRaises(AttributeError, getattr, self.i, "xyzzy")
def test_album_fallback(self):
# integration test of item-album fallback
- lib = beets.library.Library(':memory:')
+ lib = beets.library.Library(":memory:")
i = item(lib)
album = lib.add_album([i])
- album['flex'] = 'foo'
+ album["flex"] = "foo"
album.store()
- self.assertTrue('flex' in i)
- self.assertFalse('flex' in i.keys(with_album=False))
- self.assertEqual(i['flex'], 'foo')
- self.assertEqual(i.get('flex'), 'foo')
- self.assertEqual(i.get('flex', with_album=False), None)
- self.assertEqual(i.get('flexx'), None)
+ self.assertTrue("flex" in i)
+ self.assertFalse("flex" in i.keys(with_album=False))
+ self.assertEqual(i["flex"], "foo")
+ self.assertEqual(i.get("flex"), "foo")
+ self.assertEqual(i.get("flex", with_album=False), None)
+ self.assertEqual(i.get("flexx"), None)
class DestinationTest(_common.TestCase):
@@ -164,7 +175,7 @@ def setUp(self):
# default directory is ~/Music and the only reason why it was switched
# to ~/.Music is to confirm that tests works well when path to
# temporary directory contains .
- self.lib = beets.library.Library(':memory:', '~/.Music')
+ self.lib = beets.library.Library(":memory:", "~/.Music")
self.i = item(self.lib)
def tearDown(self):
@@ -176,441 +187,434 @@ def tearDown(self):
config.read(user=False, defaults=True)
def test_directory_works_with_trailing_slash(self):
- self.lib.directory = b'one/'
- self.lib.path_formats = [('default', 'two')]
- self.assertEqual(self.i.destination(), np('one/two'))
+ self.lib.directory = b"one/"
+ self.lib.path_formats = [("default", "two")]
+ self.assertEqual(self.i.destination(), np("one/two"))
def test_directory_works_without_trailing_slash(self):
- self.lib.directory = b'one'
- self.lib.path_formats = [('default', 'two')]
- self.assertEqual(self.i.destination(), np('one/two'))
+ self.lib.directory = b"one"
+ self.lib.path_formats = [("default", "two")]
+ self.assertEqual(self.i.destination(), np("one/two"))
def test_destination_substitutes_metadata_values(self):
- self.lib.directory = b'base'
- self.lib.path_formats = [('default', '$album/$artist $title')]
- self.i.title = 'three'
- self.i.artist = 'two'
- self.i.album = 'one'
- self.assertEqual(self.i.destination(),
- np('base/one/two three'))
+ self.lib.directory = b"base"
+ self.lib.path_formats = [("default", "$album/$artist $title")]
+ self.i.title = "three"
+ self.i.artist = "two"
+ self.i.album = "one"
+ self.assertEqual(self.i.destination(), np("base/one/two three"))
def test_destination_preserves_extension(self):
- self.lib.directory = b'base'
- self.lib.path_formats = [('default', '$title')]
- self.i.path = 'hey.audioformat'
- self.assertEqual(self.i.destination(),
- np('base/the title.audioformat'))
+ self.lib.directory = b"base"
+ self.lib.path_formats = [("default", "$title")]
+ self.i.path = "hey.audioformat"
+ self.assertEqual(self.i.destination(), np("base/the title.audioformat"))
def test_lower_case_extension(self):
- self.lib.directory = b'base'
- self.lib.path_formats = [('default', '$title')]
- self.i.path = 'hey.MP3'
- self.assertEqual(self.i.destination(),
- np('base/the title.mp3'))
+ self.lib.directory = b"base"
+ self.lib.path_formats = [("default", "$title")]
+ self.i.path = "hey.MP3"
+ self.assertEqual(self.i.destination(), np("base/the title.mp3"))
def test_destination_pads_some_indices(self):
- self.lib.directory = b'base'
- self.lib.path_formats = [('default',
- '$track $tracktotal $disc $disctotal $bpm')]
+ self.lib.directory = b"base"
+ self.lib.path_formats = [
+ ("default", "$track $tracktotal $disc $disctotal $bpm")
+ ]
self.i.track = 1
self.i.tracktotal = 2
self.i.disc = 3
self.i.disctotal = 4
self.i.bpm = 5
- self.assertEqual(self.i.destination(),
- np('base/01 02 03 04 5'))
+ self.assertEqual(self.i.destination(), np("base/01 02 03 04 5"))
def test_destination_pads_date_values(self):
- self.lib.directory = b'base'
- self.lib.path_formats = [('default', '$year-$month-$day')]
+ self.lib.directory = b"base"
+ self.lib.path_formats = [("default", "$year-$month-$day")]
self.i.year = 1
self.i.month = 2
self.i.day = 3
- self.assertEqual(self.i.destination(),
- np('base/0001-02-03'))
+ self.assertEqual(self.i.destination(), np("base/0001-02-03"))
def test_destination_escapes_slashes(self):
- self.i.album = 'one/two'
+ self.i.album = "one/two"
dest = self.i.destination()
- self.assertTrue(b'one' in dest)
- self.assertTrue(b'two' in dest)
- self.assertFalse(b'one/two' in dest)
+ self.assertTrue(b"one" in dest)
+ self.assertTrue(b"two" in dest)
+ self.assertFalse(b"one/two" in dest)
def test_destination_escapes_leading_dot(self):
- self.i.album = '.something'
+ self.i.album = ".something"
dest = self.i.destination()
- self.assertTrue(b'something' in dest)
- self.assertFalse(b'/.something' in dest)
+ self.assertTrue(b"something" in dest)
+ self.assertFalse(b"/.something" in dest)
def test_destination_preserves_legitimate_slashes(self):
- self.i.artist = 'one'
- self.i.album = 'two'
+ self.i.artist = "one"
+ self.i.album = "two"
dest = self.i.destination()
- self.assertTrue(os.path.join(b'one', b'two') in dest)
+ self.assertTrue(os.path.join(b"one", b"two") in dest)
def test_destination_long_names_truncated(self):
- self.i.title = 'X' * 300
- self.i.artist = 'Y' * 300
+ self.i.title = "X" * 300
+ self.i.artist = "Y" * 300
for c in self.i.destination().split(util.PATH_SEP):
self.assertTrue(len(c) <= 255)
def test_destination_long_names_keep_extension(self):
- self.i.title = 'X' * 300
- self.i.path = b'something.extn'
+ self.i.title = "X" * 300
+ self.i.path = b"something.extn"
dest = self.i.destination()
- self.assertEqual(dest[-5:], b'.extn')
+ self.assertEqual(dest[-5:], b".extn")
def test_distination_windows_removes_both_separators(self):
- self.i.title = 'one \\ two / three.mp3'
+ self.i.title = "one \\ two / three.mp3"
with _common.platform_windows():
p = self.i.destination()
- self.assertFalse(b'one \\ two' in p)
- self.assertFalse(b'one / two' in p)
- self.assertFalse(b'two \\ three' in p)
- self.assertFalse(b'two / three' in p)
+ self.assertFalse(b"one \\ two" in p)
+ self.assertFalse(b"one / two" in p)
+ self.assertFalse(b"two \\ three" in p)
+ self.assertFalse(b"two / three" in p)
def test_path_with_format(self):
- self.lib.path_formats = [('default', '$artist/$album ($format)')]
+ self.lib.path_formats = [("default", "$artist/$album ($format)")]
p = self.i.destination()
- self.assertTrue(b'(FLAC)' in p)
+ self.assertTrue(b"(FLAC)" in p)
def test_heterogeneous_album_gets_single_directory(self):
i1, i2 = item(), item()
self.lib.add_album([i1, i2])
i1.year, i2.year = 2009, 2010
- self.lib.path_formats = [('default', '$album ($year)/$track $title')]
+ self.lib.path_formats = [("default", "$album ($year)/$track $title")]
dest1, dest2 = i1.destination(), i2.destination()
self.assertEqual(os.path.dirname(dest1), os.path.dirname(dest2))
def test_default_path_for_non_compilations(self):
self.i.comp = False
self.lib.add_album([self.i])
- self.lib.directory = b'one'
- self.lib.path_formats = [('default', 'two'),
- ('comp:true', 'three')]
- self.assertEqual(self.i.destination(), np('one/two'))
+ self.lib.directory = b"one"
+ self.lib.path_formats = [("default", "two"), ("comp:true", "three")]
+ self.assertEqual(self.i.destination(), np("one/two"))
def test_singleton_path(self):
i = item(self.lib)
- self.lib.directory = b'one'
+ self.lib.directory = b"one"
self.lib.path_formats = [
- ('default', 'two'),
- ('singleton:true', 'four'),
- ('comp:true', 'three'),
+ ("default", "two"),
+ ("singleton:true", "four"),
+ ("comp:true", "three"),
]
- self.assertEqual(i.destination(), np('one/four'))
+ self.assertEqual(i.destination(), np("one/four"))
def test_comp_before_singleton_path(self):
i = item(self.lib)
i.comp = True
- self.lib.directory = b'one'
+ self.lib.directory = b"one"
self.lib.path_formats = [
- ('default', 'two'),
- ('comp:true', 'three'),
- ('singleton:true', 'four'),
+ ("default", "two"),
+ ("comp:true", "three"),
+ ("singleton:true", "four"),
]
- self.assertEqual(i.destination(), np('one/three'))
+ self.assertEqual(i.destination(), np("one/three"))
def test_comp_path(self):
self.i.comp = True
self.lib.add_album([self.i])
- self.lib.directory = b'one'
+ self.lib.directory = b"one"
self.lib.path_formats = [
- ('default', 'two'),
- ('comp:true', 'three'),
+ ("default", "two"),
+ ("comp:true", "three"),
]
- self.assertEqual(self.i.destination(), np('one/three'))
+ self.assertEqual(self.i.destination(), np("one/three"))
def test_albumtype_query_path(self):
self.i.comp = True
self.lib.add_album([self.i])
- self.i.albumtype = 'sometype'
- self.lib.directory = b'one'
+ self.i.albumtype = "sometype"
+ self.lib.directory = b"one"
self.lib.path_formats = [
- ('default', 'two'),
- ('albumtype:sometype', 'four'),
- ('comp:true', 'three'),
+ ("default", "two"),
+ ("albumtype:sometype", "four"),
+ ("comp:true", "three"),
]
- self.assertEqual(self.i.destination(), np('one/four'))
+ self.assertEqual(self.i.destination(), np("one/four"))
def test_albumtype_path_fallback_to_comp(self):
self.i.comp = True
self.lib.add_album([self.i])
- self.i.albumtype = 'sometype'
- self.lib.directory = b'one'
+ self.i.albumtype = "sometype"
+ self.lib.directory = b"one"
self.lib.path_formats = [
- ('default', 'two'),
- ('albumtype:anothertype', 'four'),
- ('comp:true', 'three'),
+ ("default", "two"),
+ ("albumtype:anothertype", "four"),
+ ("comp:true", "three"),
]
- self.assertEqual(self.i.destination(), np('one/three'))
+ self.assertEqual(self.i.destination(), np("one/three"))
def test_get_formatted_does_not_replace_separators(self):
with _common.platform_posix():
- name = os.path.join('a', 'b')
+ name = os.path.join("a", "b")
self.i.title = name
- newname = self.i.formatted().get('title')
+ newname = self.i.formatted().get("title")
self.assertEqual(name, newname)
def test_get_formatted_pads_with_zero(self):
with _common.platform_posix():
self.i.track = 1
- name = self.i.formatted().get('track')
- self.assertTrue(name.startswith('0'))
+ name = self.i.formatted().get("track")
+ self.assertTrue(name.startswith("0"))
def test_get_formatted_uses_kbps_bitrate(self):
with _common.platform_posix():
self.i.bitrate = 12345
- val = self.i.formatted().get('bitrate')
- self.assertEqual(val, '12kbps')
+ val = self.i.formatted().get("bitrate")
+ self.assertEqual(val, "12kbps")
def test_get_formatted_uses_khz_samplerate(self):
with _common.platform_posix():
self.i.samplerate = 12345
- val = self.i.formatted().get('samplerate')
- self.assertEqual(val, '12kHz')
+ val = self.i.formatted().get("samplerate")
+ self.assertEqual(val, "12kHz")
def test_get_formatted_datetime(self):
with _common.platform_posix():
self.i.added = 1368302461.210265
- val = self.i.formatted().get('added')
- self.assertTrue(val.startswith('2013'))
+ val = self.i.formatted().get("added")
+ self.assertTrue(val.startswith("2013"))
def test_get_formatted_none(self):
with _common.platform_posix():
self.i.some_other_field = None
- val = self.i.formatted().get('some_other_field')
- self.assertEqual(val, '')
+ val = self.i.formatted().get("some_other_field")
+ self.assertEqual(val, "")
def test_artist_falls_back_to_albumartist(self):
- self.i.artist = ''
- self.i.albumartist = 'something'
- self.lib.path_formats = [('default', '$artist')]
+ self.i.artist = ""
+ self.i.albumartist = "something"
+ self.lib.path_formats = [("default", "$artist")]
p = self.i.destination()
- self.assertEqual(p.rsplit(util.PATH_SEP, 1)[1], b'something')
+ self.assertEqual(p.rsplit(util.PATH_SEP, 1)[1], b"something")
def test_albumartist_falls_back_to_artist(self):
- self.i.artist = 'trackartist'
- self.i.albumartist = ''
- self.lib.path_formats = [('default', '$albumartist')]
+ self.i.artist = "trackartist"
+ self.i.albumartist = ""
+ self.lib.path_formats = [("default", "$albumartist")]
p = self.i.destination()
- self.assertEqual(p.rsplit(util.PATH_SEP, 1)[1], b'trackartist')
+ self.assertEqual(p.rsplit(util.PATH_SEP, 1)[1], b"trackartist")
def test_artist_overrides_albumartist(self):
- self.i.artist = 'theartist'
- self.i.albumartist = 'something'
- self.lib.path_formats = [('default', '$artist')]
+ self.i.artist = "theartist"
+ self.i.albumartist = "something"
+ self.lib.path_formats = [("default", "$artist")]
p = self.i.destination()
- self.assertEqual(p.rsplit(util.PATH_SEP, 1)[1], b'theartist')
+ self.assertEqual(p.rsplit(util.PATH_SEP, 1)[1], b"theartist")
def test_albumartist_overrides_artist(self):
- self.i.artist = 'theartist'
- self.i.albumartist = 'something'
- self.lib.path_formats = [('default', '$albumartist')]
+ self.i.artist = "theartist"
+ self.i.albumartist = "something"
+ self.lib.path_formats = [("default", "$albumartist")]
p = self.i.destination()
- self.assertEqual(p.rsplit(util.PATH_SEP, 1)[1], b'something')
+ self.assertEqual(p.rsplit(util.PATH_SEP, 1)[1], b"something")
def test_unicode_normalized_nfd_on_mac(self):
- instr = unicodedata.normalize('NFC', 'caf\xe9')
- self.lib.path_formats = [('default', instr)]
- dest = self.i.destination(platform='darwin', fragment=True)
- self.assertEqual(dest, unicodedata.normalize('NFD', instr))
+ instr = unicodedata.normalize("NFC", "caf\xe9")
+ self.lib.path_formats = [("default", instr)]
+ dest = self.i.destination(platform="darwin", fragment=True)
+ self.assertEqual(dest, unicodedata.normalize("NFD", instr))
def test_unicode_normalized_nfc_on_linux(self):
- instr = unicodedata.normalize('NFD', 'caf\xe9')
- self.lib.path_formats = [('default', instr)]
- dest = self.i.destination(platform='linux', fragment=True)
- self.assertEqual(dest, unicodedata.normalize('NFC', instr))
+ instr = unicodedata.normalize("NFD", "caf\xe9")
+ self.lib.path_formats = [("default", instr)]
+ dest = self.i.destination(platform="linux", fragment=True)
+ self.assertEqual(dest, unicodedata.normalize("NFC", instr))
def test_non_mbcs_characters_on_windows(self):
oldfunc = sys.getfilesystemencoding
- sys.getfilesystemencoding = lambda: 'mbcs'
+ sys.getfilesystemencoding = lambda: "mbcs"
try:
- self.i.title = 'h\u0259d'
- self.lib.path_formats = [('default', '$title')]
+ self.i.title = "h\u0259d"
+ self.lib.path_formats = [("default", "$title")]
p = self.i.destination()
- self.assertFalse(b'?' in p)
+ self.assertFalse(b"?" in p)
# We use UTF-8 to encode Windows paths now.
- self.assertTrue('h\u0259d'.encode() in p)
+ self.assertTrue("h\u0259d".encode() in p)
finally:
sys.getfilesystemencoding = oldfunc
def test_unicode_extension_in_fragment(self):
- self.lib.path_formats = [('default', 'foo')]
- self.i.path = util.bytestring_path('bar.caf\xe9')
- dest = self.i.destination(platform='linux', fragment=True)
- self.assertEqual(dest, 'foo.caf\xe9')
+ self.lib.path_formats = [("default", "foo")]
+ self.i.path = util.bytestring_path("bar.caf\xe9")
+ dest = self.i.destination(platform="linux", fragment=True)
+ self.assertEqual(dest, "foo.caf\xe9")
def test_asciify_and_replace(self):
- config['asciify_paths'] = True
- self.lib.replacements = [(re.compile('"'), 'q')]
- self.lib.directory = b'lib'
- self.lib.path_formats = [('default', '$title')]
- self.i.title = '\u201c\u00f6\u2014\u00cf\u201d'
- self.assertEqual(self.i.destination(), np('lib/qo--Iq'))
+ config["asciify_paths"] = True
+ self.lib.replacements = [(re.compile('"'), "q")]
+ self.lib.directory = b"lib"
+ self.lib.path_formats = [("default", "$title")]
+ self.i.title = "\u201c\u00f6\u2014\u00cf\u201d"
+ self.assertEqual(self.i.destination(), np("lib/qo--Iq"))
def test_asciify_character_expanding_to_slash(self):
- config['asciify_paths'] = True
- self.lib.directory = b'lib'
- self.lib.path_formats = [('default', '$title')]
- self.i.title = 'ab\xa2\xbdd'
- self.assertEqual(self.i.destination(), np('lib/abC_ 1_2d'))
+ config["asciify_paths"] = True
+ self.lib.directory = b"lib"
+ self.lib.path_formats = [("default", "$title")]
+ self.i.title = "ab\xa2\xbdd"
+ self.assertEqual(self.i.destination(), np("lib/abC_ 1_2d"))
def test_destination_with_replacements(self):
- self.lib.directory = b'base'
- self.lib.replacements = [(re.compile(r'a'), 'e')]
- self.lib.path_formats = [('default', '$album/$title')]
- self.i.title = 'foo'
- self.i.album = 'bar'
- self.assertEqual(self.i.destination(),
- np('base/ber/foo'))
+ self.lib.directory = b"base"
+ self.lib.replacements = [(re.compile(r"a"), "e")]
+ self.lib.path_formats = [("default", "$album/$title")]
+ self.i.title = "foo"
+ self.i.album = "bar"
+ self.assertEqual(self.i.destination(), np("base/ber/foo"))
def test_destination_with_replacements_argument(self):
- self.lib.directory = b'base'
- self.lib.replacements = [(re.compile(r'a'), 'f')]
- self.lib.path_formats = [('default', '$album/$title')]
- self.i.title = 'foo'
- self.i.album = 'bar'
- replacements = [(re.compile(r'a'), 'e')]
- self.assertEqual(self.i.destination(replacements=replacements),
- np('base/ber/foo'))
-
- @unittest.skip('unimplemented: #359')
+ self.lib.directory = b"base"
+ self.lib.replacements = [(re.compile(r"a"), "f")]
+ self.lib.path_formats = [("default", "$album/$title")]
+ self.i.title = "foo"
+ self.i.album = "bar"
+ replacements = [(re.compile(r"a"), "e")]
+ self.assertEqual(
+ self.i.destination(replacements=replacements), np("base/ber/foo")
+ )
+
+ @unittest.skip("unimplemented: #359")
def test_destination_with_empty_component(self):
- self.lib.directory = b'base'
- self.lib.replacements = [(re.compile(r'^$'), '_')]
- self.lib.path_formats = [('default', '$album/$artist/$title')]
- self.i.title = 'three'
- self.i.artist = ''
- self.i.albumartist = ''
- self.i.album = 'one'
- self.assertEqual(self.i.destination(),
- np('base/one/_/three'))
-
- @unittest.skip('unimplemented: #359')
+ self.lib.directory = b"base"
+ self.lib.replacements = [(re.compile(r"^$"), "_")]
+ self.lib.path_formats = [("default", "$album/$artist/$title")]
+ self.i.title = "three"
+ self.i.artist = ""
+ self.i.albumartist = ""
+ self.i.album = "one"
+ self.assertEqual(self.i.destination(), np("base/one/_/three"))
+
+ @unittest.skip("unimplemented: #359")
def test_destination_with_empty_final_component(self):
- self.lib.directory = b'base'
- self.lib.replacements = [(re.compile(r'^$'), '_')]
- self.lib.path_formats = [('default', '$album/$title')]
- self.i.title = ''
- self.i.album = 'one'
- self.i.path = 'foo.mp3'
- self.assertEqual(self.i.destination(),
- np('base/one/_.mp3'))
+ self.lib.directory = b"base"
+ self.lib.replacements = [(re.compile(r"^$"), "_")]
+ self.lib.path_formats = [("default", "$album/$title")]
+ self.i.title = ""
+ self.i.album = "one"
+ self.i.path = "foo.mp3"
+ self.assertEqual(self.i.destination(), np("base/one/_.mp3"))
def test_legalize_path_one_for_one_replacement(self):
# Use a replacement that should always replace the last X in any
# path component with a Z.
self.lib.replacements = [
- (re.compile(r'X$'), 'Z'),
+ (re.compile(r"X$"), "Z"),
]
# Construct an item whose untruncated path ends with a Y but whose
# truncated version ends with an X.
- self.i.title = 'X' * 300 + 'Y'
+ self.i.title = "X" * 300 + "Y"
# The final path should reflect the replacement.
dest = self.i.destination()
- self.assertEqual(dest[-2:], b'XZ')
+ self.assertEqual(dest[-2:], b"XZ")
def test_legalize_path_one_for_many_replacement(self):
# Use a replacement that should always replace the last X in any
# path component with four Zs.
self.lib.replacements = [
- (re.compile(r'X$'), 'ZZZZ'),
+ (re.compile(r"X$"), "ZZZZ"),
]
# Construct an item whose untruncated path ends with a Y but whose
# truncated version ends with an X.
- self.i.title = 'X' * 300 + 'Y'
+ self.i.title = "X" * 300 + "Y"
# The final path should ignore the user replacement and create a path
# of the correct length, containing Xs.
dest = self.i.destination()
- self.assertEqual(dest[-2:], b'XX')
+ self.assertEqual(dest[-2:], b"XX")
def test_album_field_query(self):
- self.lib.directory = b'one'
- self.lib.path_formats = [('default', 'two'),
- ('flex:foo', 'three')]
+ self.lib.directory = b"one"
+ self.lib.path_formats = [("default", "two"), ("flex:foo", "three")]
album = self.lib.add_album([self.i])
- self.assertEqual(self.i.destination(), np('one/two'))
- album['flex'] = 'foo'
+ self.assertEqual(self.i.destination(), np("one/two"))
+ album["flex"] = "foo"
album.store()
- self.assertEqual(self.i.destination(), np('one/three'))
+ self.assertEqual(self.i.destination(), np("one/three"))
def test_album_field_in_template(self):
- self.lib.directory = b'one'
- self.lib.path_formats = [('default', '$flex/two')]
+ self.lib.directory = b"one"
+ self.lib.path_formats = [("default", "$flex/two")]
album = self.lib.add_album([self.i])
- album['flex'] = 'foo'
+ album["flex"] = "foo"
album.store()
- self.assertEqual(self.i.destination(), np('one/foo/two'))
+ self.assertEqual(self.i.destination(), np("one/foo/two"))
class ItemFormattedMappingTest(_common.LibTestCase):
def test_formatted_item_value(self):
formatted = self.i.formatted()
- self.assertEqual(formatted['artist'], 'the artist')
+ self.assertEqual(formatted["artist"], "the artist")
def test_get_unset_field(self):
formatted = self.i.formatted()
with self.assertRaises(KeyError):
- formatted['other_field']
+ formatted["other_field"]
def test_get_method_with_default(self):
formatted = self.i.formatted()
- self.assertEqual(formatted.get('other_field'), '')
+ self.assertEqual(formatted.get("other_field"), "")
def test_get_method_with_specified_default(self):
formatted = self.i.formatted()
- self.assertEqual(formatted.get('other_field', 'default'), 'default')
+ self.assertEqual(formatted.get("other_field", "default"), "default")
def test_item_precedence(self):
album = self.lib.add_album([self.i])
- album['artist'] = 'foo'
+ album["artist"] = "foo"
album.store()
- self.assertNotEqual('foo', self.i.formatted().get('artist'))
+ self.assertNotEqual("foo", self.i.formatted().get("artist"))
def test_album_flex_field(self):
album = self.lib.add_album([self.i])
- album['flex'] = 'foo'
+ album["flex"] = "foo"
album.store()
- self.assertEqual('foo', self.i.formatted().get('flex'))
+ self.assertEqual("foo", self.i.formatted().get("flex"))
def test_album_field_overrides_item_field_for_path(self):
# Make the album inconsistent with the item.
album = self.lib.add_album([self.i])
- album.album = 'foo'
+ album.album = "foo"
album.store()
- self.i.album = 'bar'
+ self.i.album = "bar"
self.i.store()
# Ensure the album takes precedence.
formatted = self.i.formatted(for_path=True)
- self.assertEqual(formatted['album'], 'foo')
+ self.assertEqual(formatted["album"], "foo")
def test_artist_falls_back_to_albumartist(self):
- self.i.artist = ''
+ self.i.artist = ""
formatted = self.i.formatted()
- self.assertEqual(formatted['artist'], 'the album artist')
+ self.assertEqual(formatted["artist"], "the album artist")
def test_albumartist_falls_back_to_artist(self):
- self.i.albumartist = ''
+ self.i.albumartist = ""
formatted = self.i.formatted()
- self.assertEqual(formatted['albumartist'], 'the artist')
+ self.assertEqual(formatted["albumartist"], "the artist")
def test_both_artist_and_albumartist_empty(self):
- self.i.artist = ''
- self.i.albumartist = ''
+ self.i.artist = ""
+ self.i.albumartist = ""
formatted = self.i.formatted()
- self.assertEqual(formatted['albumartist'], '')
+ self.assertEqual(formatted["albumartist"], "")
class PathFormattingMixin:
"""Utilities for testing path formatting."""
+
def _setf(self, fmt):
- self.lib.path_formats.insert(0, ('default', fmt))
+ self.lib.path_formats.insert(0, ("default", fmt))
def _assert_dest(self, dest, i=None):
if i is None:
@@ -623,9 +627,9 @@ def _assert_dest(self, dest, i=None):
class DestinationFunctionTest(_common.TestCase, PathFormattingMixin):
def setUp(self):
super().setUp()
- self.lib = beets.library.Library(':memory:')
- self.lib.directory = b'/base'
- self.lib.path_formats = [('default', 'path')]
+ self.lib = beets.library.Library(":memory:")
+ self.lib.directory = b"/base"
+ self.lib.path_formats = [("default", "path")]
self.i = item(self.lib)
def tearDown(self):
@@ -633,108 +637,108 @@ def tearDown(self):
self.lib._connection().close()
def test_upper_case_literal(self):
- self._setf('%upper{foo}')
- self._assert_dest(b'/base/FOO')
+ self._setf("%upper{foo}")
+ self._assert_dest(b"/base/FOO")
def test_upper_case_variable(self):
- self._setf('%upper{$title}')
- self._assert_dest(b'/base/THE TITLE')
+ self._setf("%upper{$title}")
+ self._assert_dest(b"/base/THE TITLE")
def test_title_case_variable(self):
- self._setf('%title{$title}')
- self._assert_dest(b'/base/The Title')
+ self._setf("%title{$title}")
+ self._assert_dest(b"/base/The Title")
def test_title_case_variable_aphostrophe(self):
- self._setf('%title{I can\'t}')
- self._assert_dest(b'/base/I Can\'t')
+ self._setf("%title{I can't}")
+ self._assert_dest(b"/base/I Can't")
def test_asciify_variable(self):
- self._setf('%asciify{ab\xa2\xbdd}')
- self._assert_dest(b'/base/abC_ 1_2d')
+ self._setf("%asciify{ab\xa2\xbdd}")
+ self._assert_dest(b"/base/abC_ 1_2d")
def test_left_variable(self):
- self._setf('%left{$title, 3}')
- self._assert_dest(b'/base/the')
+ self._setf("%left{$title, 3}")
+ self._assert_dest(b"/base/the")
def test_right_variable(self):
- self._setf('%right{$title,3}')
- self._assert_dest(b'/base/tle')
+ self._setf("%right{$title,3}")
+ self._assert_dest(b"/base/tle")
def test_if_false(self):
- self._setf('x%if{,foo}')
- self._assert_dest(b'/base/x')
+ self._setf("x%if{,foo}")
+ self._assert_dest(b"/base/x")
def test_if_false_value(self):
- self._setf('x%if{false,foo}')
- self._assert_dest(b'/base/x')
+ self._setf("x%if{false,foo}")
+ self._assert_dest(b"/base/x")
def test_if_true(self):
- self._setf('%if{bar,foo}')
- self._assert_dest(b'/base/foo')
+ self._setf("%if{bar,foo}")
+ self._assert_dest(b"/base/foo")
def test_if_else_false(self):
- self._setf('%if{,foo,baz}')
- self._assert_dest(b'/base/baz')
+ self._setf("%if{,foo,baz}")
+ self._assert_dest(b"/base/baz")
def test_if_else_false_value(self):
- self._setf('%if{false,foo,baz}')
- self._assert_dest(b'/base/baz')
+ self._setf("%if{false,foo,baz}")
+ self._assert_dest(b"/base/baz")
def test_if_int_value(self):
- self._setf('%if{0,foo,baz}')
- self._assert_dest(b'/base/baz')
+ self._setf("%if{0,foo,baz}")
+ self._assert_dest(b"/base/baz")
def test_nonexistent_function(self):
- self._setf('%foo{bar}')
- self._assert_dest(b'/base/%foo{bar}')
+ self._setf("%foo{bar}")
+ self._assert_dest(b"/base/%foo{bar}")
def test_if_def_field_return_self(self):
self.i.bar = 3
- self._setf('%ifdef{bar}')
- self._assert_dest(b'/base/3')
+ self._setf("%ifdef{bar}")
+ self._assert_dest(b"/base/3")
def test_if_def_field_not_defined(self):
- self._setf(' %ifdef{bar}/$artist')
- self._assert_dest(b'/base/the artist')
+ self._setf(" %ifdef{bar}/$artist")
+ self._assert_dest(b"/base/the artist")
def test_if_def_field_not_defined_2(self):
- self._setf('$artist/%ifdef{bar}')
- self._assert_dest(b'/base/the artist')
+ self._setf("$artist/%ifdef{bar}")
+ self._assert_dest(b"/base/the artist")
def test_if_def_true(self):
- self._setf('%ifdef{artist,cool}')
- self._assert_dest(b'/base/cool')
+ self._setf("%ifdef{artist,cool}")
+ self._assert_dest(b"/base/cool")
def test_if_def_true_complete(self):
self.i.series = "Now"
- self._setf('%ifdef{series,$series Series,Albums}/$album')
- self._assert_dest(b'/base/Now Series/the album')
+ self._setf("%ifdef{series,$series Series,Albums}/$album")
+ self._assert_dest(b"/base/Now Series/the album")
def test_if_def_false_complete(self):
- self._setf('%ifdef{plays,$plays,not_played}')
- self._assert_dest(b'/base/not_played')
+ self._setf("%ifdef{plays,$plays,not_played}")
+ self._assert_dest(b"/base/not_played")
def test_first(self):
self.i.genres = "Pop; Rock; Classical Crossover"
- self._setf('%first{$genres}')
- self._assert_dest(b'/base/Pop')
+ self._setf("%first{$genres}")
+ self._assert_dest(b"/base/Pop")
def test_first_skip(self):
self.i.genres = "Pop; Rock; Classical Crossover"
- self._setf('%first{$genres,1,2}')
- self._assert_dest(b'/base/Classical Crossover')
+ self._setf("%first{$genres,1,2}")
+ self._assert_dest(b"/base/Classical Crossover")
def test_first_different_sep(self):
- self._setf('%first{Alice / Bob / Eve,2,0, / , & }')
- self._assert_dest(b'/base/Alice & Bob')
+ self._setf("%first{Alice / Bob / Eve,2,0, / , & }")
+ self._assert_dest(b"/base/Alice & Bob")
class DisambiguationTest(_common.TestCase, PathFormattingMixin):
def setUp(self):
super().setUp()
- self.lib = beets.library.Library(':memory:')
- self.lib.directory = b'/base'
- self.lib.path_formats = [('default', 'path')]
+ self.lib = beets.library.Library(":memory:")
+ self.lib.directory = b"/base"
+ self.lib.path_formats = [("default", "path")]
self.i1 = item()
self.i1.year = 2001
@@ -744,86 +748,86 @@ def setUp(self):
self.lib.add_album([self.i2])
self.lib._connection().commit()
- self._setf('foo%aunique{albumartist album,year}/$title')
+ self._setf("foo%aunique{albumartist album,year}/$title")
def tearDown(self):
super().tearDown()
self.lib._connection().close()
def test_unique_expands_to_disambiguating_year(self):
- self._assert_dest(b'/base/foo [2001]/the title', self.i1)
+ self._assert_dest(b"/base/foo [2001]/the title", self.i1)
def test_unique_with_default_arguments_uses_albumtype(self):
album2 = self.lib.get_album(self.i1)
- album2.albumtype = 'bar'
+ album2.albumtype = "bar"
album2.store()
- self._setf('foo%aunique{}/$title')
- self._assert_dest(b'/base/foo [bar]/the title', self.i1)
+ self._setf("foo%aunique{}/$title")
+ self._assert_dest(b"/base/foo [bar]/the title", self.i1)
def test_unique_expands_to_nothing_for_distinct_albums(self):
album2 = self.lib.get_album(self.i2)
- album2.album = 'different album'
+ album2.album = "different album"
album2.store()
- self._assert_dest(b'/base/foo/the title', self.i1)
+ self._assert_dest(b"/base/foo/the title", self.i1)
def test_use_fallback_numbers_when_identical(self):
album2 = self.lib.get_album(self.i2)
album2.year = 2001
album2.store()
- self._assert_dest(b'/base/foo [1]/the title', self.i1)
- self._assert_dest(b'/base/foo [2]/the title', self.i2)
+ self._assert_dest(b"/base/foo [1]/the title", self.i1)
+ self._assert_dest(b"/base/foo [2]/the title", self.i2)
def test_unique_falls_back_to_second_distinguishing_field(self):
- self._setf('foo%aunique{albumartist album,month year}/$title')
- self._assert_dest(b'/base/foo [2001]/the title', self.i1)
+ self._setf("foo%aunique{albumartist album,month year}/$title")
+ self._assert_dest(b"/base/foo [2001]/the title", self.i1)
def test_unique_sanitized(self):
album2 = self.lib.get_album(self.i2)
album2.year = 2001
album1 = self.lib.get_album(self.i1)
- album1.albumtype = 'foo/bar'
+ album1.albumtype = "foo/bar"
album2.store()
album1.store()
- self._setf('foo%aunique{albumartist album,albumtype}/$title')
- self._assert_dest(b'/base/foo [foo_bar]/the title', self.i1)
+ self._setf("foo%aunique{albumartist album,albumtype}/$title")
+ self._assert_dest(b"/base/foo [foo_bar]/the title", self.i1)
def test_drop_empty_disambig_string(self):
album1 = self.lib.get_album(self.i1)
album1.albumdisambig = None
album2 = self.lib.get_album(self.i2)
- album2.albumdisambig = 'foo'
+ album2.albumdisambig = "foo"
album1.store()
album2.store()
- self._setf('foo%aunique{albumartist album,albumdisambig}/$title')
- self._assert_dest(b'/base/foo/the title', self.i1)
+ self._setf("foo%aunique{albumartist album,albumdisambig}/$title")
+ self._assert_dest(b"/base/foo/the title", self.i1)
def test_change_brackets(self):
- self._setf('foo%aunique{albumartist album,year,()}/$title')
- self._assert_dest(b'/base/foo (2001)/the title', self.i1)
+ self._setf("foo%aunique{albumartist album,year,()}/$title")
+ self._assert_dest(b"/base/foo (2001)/the title", self.i1)
def test_remove_brackets(self):
- self._setf('foo%aunique{albumartist album,year,}/$title')
- self._assert_dest(b'/base/foo 2001/the title', self.i1)
+ self._setf("foo%aunique{albumartist album,year,}/$title")
+ self._assert_dest(b"/base/foo 2001/the title", self.i1)
def test_key_flexible_attribute(self):
album1 = self.lib.get_album(self.i1)
- album1.flex = 'flex1'
+ album1.flex = "flex1"
album2 = self.lib.get_album(self.i2)
- album2.flex = 'flex2'
+ album2.flex = "flex2"
album1.store()
album2.store()
- self._setf('foo%aunique{albumartist album flex,year}/$title')
- self._assert_dest(b'/base/foo/the title', self.i1)
+ self._setf("foo%aunique{albumartist album flex,year}/$title")
+ self._assert_dest(b"/base/foo/the title", self.i1)
class SingletonDisambiguationTest(_common.TestCase, PathFormattingMixin):
def setUp(self):
super().setUp()
- self.lib = beets.library.Library(':memory:')
- self.lib.directory = b'/base'
- self.lib.path_formats = [('default', 'path')]
+ self.lib = beets.library.Library(":memory:")
+ self.lib.directory = b"/base"
+ self.lib.path_formats = [("default", "path")]
self.i1 = item()
self.i1.year = 2001
@@ -833,74 +837,74 @@ def setUp(self):
self.lib.add(self.i2)
self.lib._connection().commit()
- self._setf('foo/$title%sunique{artist title,year}')
+ self._setf("foo/$title%sunique{artist title,year}")
def tearDown(self):
super().tearDown()
self.lib._connection().close()
def test_sunique_expands_to_disambiguating_year(self):
- self._assert_dest(b'/base/foo/the title [2001]', self.i1)
+ self._assert_dest(b"/base/foo/the title [2001]", self.i1)
def test_sunique_with_default_arguments_uses_trackdisambig(self):
- self.i1.trackdisambig = 'live version'
+ self.i1.trackdisambig = "live version"
self.i1.year = self.i2.year
self.i1.store()
- self._setf('foo/$title%sunique{}')
- self._assert_dest(b'/base/foo/the title [live version]', self.i1)
+ self._setf("foo/$title%sunique{}")
+ self._assert_dest(b"/base/foo/the title [live version]", self.i1)
def test_sunique_expands_to_nothing_for_distinct_singletons(self):
- self.i2.title = 'different track'
+ self.i2.title = "different track"
self.i2.store()
- self._assert_dest(b'/base/foo/the title', self.i1)
+ self._assert_dest(b"/base/foo/the title", self.i1)
def test_sunique_does_not_match_album(self):
self.lib.add_album([self.i2])
- self._assert_dest(b'/base/foo/the title', self.i1)
+ self._assert_dest(b"/base/foo/the title", self.i1)
def test_sunique_use_fallback_numbers_when_identical(self):
self.i2.year = self.i1.year
self.i2.store()
- self._assert_dest(b'/base/foo/the title [1]', self.i1)
- self._assert_dest(b'/base/foo/the title [2]', self.i2)
+ self._assert_dest(b"/base/foo/the title [1]", self.i1)
+ self._assert_dest(b"/base/foo/the title [2]", self.i2)
def test_sunique_falls_back_to_second_distinguishing_field(self):
- self._setf('foo/$title%sunique{albumartist album,month year}')
- self._assert_dest(b'/base/foo/the title [2001]', self.i1)
+ self._setf("foo/$title%sunique{albumartist album,month year}")
+ self._assert_dest(b"/base/foo/the title [2001]", self.i1)
def test_sunique_sanitized(self):
self.i2.year = self.i1.year
- self.i1.trackdisambig = 'foo/bar'
+ self.i1.trackdisambig = "foo/bar"
self.i2.store()
self.i1.store()
- self._setf('foo/$title%sunique{artist title,trackdisambig}')
- self._assert_dest(b'/base/foo/the title [foo_bar]', self.i1)
+ self._setf("foo/$title%sunique{artist title,trackdisambig}")
+ self._assert_dest(b"/base/foo/the title [foo_bar]", self.i1)
def test_drop_empty_disambig_string(self):
self.i1.trackdisambig = None
- self.i2.trackdisambig = 'foo'
+ self.i2.trackdisambig = "foo"
self.i1.store()
self.i2.store()
- self._setf('foo/$title%sunique{albumartist album,trackdisambig}')
- self._assert_dest(b'/base/foo/the title', self.i1)
+ self._setf("foo/$title%sunique{albumartist album,trackdisambig}")
+ self._assert_dest(b"/base/foo/the title", self.i1)
def test_change_brackets(self):
- self._setf('foo/$title%sunique{artist title,year,()}')
- self._assert_dest(b'/base/foo/the title (2001)', self.i1)
+ self._setf("foo/$title%sunique{artist title,year,()}")
+ self._assert_dest(b"/base/foo/the title (2001)", self.i1)
def test_remove_brackets(self):
- self._setf('foo/$title%sunique{artist title,year,}')
- self._assert_dest(b'/base/foo/the title 2001', self.i1)
+ self._setf("foo/$title%sunique{artist title,year,}")
+ self._assert_dest(b"/base/foo/the title 2001", self.i1)
def test_key_flexible_attribute(self):
- self.i1.flex = 'flex1'
- self.i2.flex = 'flex2'
+ self.i1.flex = "flex1"
+ self.i2.flex = "flex2"
self.i1.store()
self.i2.store()
- self._setf('foo/$title%sunique{artist title flex,year}')
- self._assert_dest(b'/base/foo/the title', self.i1)
+ self._setf("foo/$title%sunique{artist title flex,year}")
+ self._assert_dest(b"/base/foo/the title", self.i1)
class PluginDestinationTest(_common.TestCase):
@@ -919,9 +923,9 @@ def field_getters():
self.old_field_getters = plugins.item_field_getters
plugins.item_field_getters = field_getters
- self.lib = beets.library.Library(':memory:')
- self.lib.directory = b'/base'
- self.lib.path_formats = [('default', '$artist $foo')]
+ self.lib = beets.library.Library(":memory:")
+ self.lib.directory = b"/base"
+ self.lib.path_formats = [("default", "$artist $foo")]
self.i = item(self.lib)
def tearDown(self):
@@ -931,34 +935,34 @@ def tearDown(self):
def _assert_dest(self, dest):
with _common.platform_posix():
the_dest = self.i.destination()
- self.assertEqual(the_dest, b'/base/' + dest)
+ self.assertEqual(the_dest, b"/base/" + dest)
def test_undefined_value_not_substituted(self):
- self._assert_dest(b'the artist $foo')
+ self._assert_dest(b"the artist $foo")
def test_plugin_value_not_substituted(self):
self._tv_map = {
- 'foo': 'bar',
+ "foo": "bar",
}
- self._assert_dest(b'the artist bar')
+ self._assert_dest(b"the artist bar")
def test_plugin_value_overrides_attribute(self):
self._tv_map = {
- 'artist': 'bar',
+ "artist": "bar",
}
- self._assert_dest(b'bar $foo')
+ self._assert_dest(b"bar $foo")
def test_plugin_value_sanitized(self):
self._tv_map = {
- 'foo': 'bar/baz',
+ "foo": "bar/baz",
}
- self._assert_dest(b'the artist bar_baz')
+ self._assert_dest(b"the artist bar_baz")
class AlbumInfoTest(_common.TestCase):
def setUp(self):
super().setUp()
- self.lib = beets.library.Library(':memory:')
+ self.lib = beets.library.Library(":memory:")
self.i = item()
self.lib.add_album((self.i,))
@@ -971,10 +975,10 @@ def test_albuminfo_reflects_metadata(self):
def test_albuminfo_stores_art(self):
ai = self.lib.get_album(self.i)
- ai.artpath = '/my/great/art'
+ ai.artpath = "/my/great/art"
ai.store()
new_ai = self.lib.get_album(self.i)
- self.assertEqual(new_ai.artpath, b'/my/great/art')
+ self.assertEqual(new_ai.artpath, b"/my/great/art")
def test_albuminfo_for_two_items_doesnt_duplicate_row(self):
i2 = item(self.lib)
@@ -982,14 +986,14 @@ def test_albuminfo_for_two_items_doesnt_duplicate_row(self):
self.lib.get_album(i2)
c = self.lib._connection().cursor()
- c.execute('select * from albums where album=?', (self.i.album,))
+ c.execute("select * from albums where album=?", (self.i.album,))
# Cursor should only return one row.
self.assertNotEqual(c.fetchone(), None)
self.assertEqual(c.fetchone(), None)
def test_individual_tracks_have_no_albuminfo(self):
i2 = item()
- i2.album = 'aTotallyDifferentAlbum'
+ i2.album = "aTotallyDifferentAlbum"
self.lib.add(i2)
ai = self.lib.get_album(i2)
self.assertEqual(ai, None)
@@ -1009,38 +1013,38 @@ def test_album_items_consistent(self):
def test_albuminfo_changes_affect_items(self):
ai = self.lib.get_album(self.i)
- ai.album = 'myNewAlbum'
+ ai.album = "myNewAlbum"
ai.store()
i = self.lib.items()[0]
- self.assertEqual(i.album, 'myNewAlbum')
+ self.assertEqual(i.album, "myNewAlbum")
def test_albuminfo_change_albumartist_changes_items(self):
ai = self.lib.get_album(self.i)
- ai.albumartist = 'myNewArtist'
+ ai.albumartist = "myNewArtist"
ai.store()
i = self.lib.items()[0]
- self.assertEqual(i.albumartist, 'myNewArtist')
- self.assertNotEqual(i.artist, 'myNewArtist')
+ self.assertEqual(i.albumartist, "myNewArtist")
+ self.assertNotEqual(i.artist, "myNewArtist")
def test_albuminfo_change_artist_does_change_items(self):
ai = self.lib.get_album(self.i)
- ai.artist = 'myNewArtist'
+ ai.artist = "myNewArtist"
ai.store(inherit=True)
i = self.lib.items()[0]
- self.assertEqual(i.artist, 'myNewArtist')
+ self.assertEqual(i.artist, "myNewArtist")
def test_albuminfo_change_artist_does_not_change_items(self):
ai = self.lib.get_album(self.i)
- ai.artist = 'myNewArtist'
+ ai.artist = "myNewArtist"
ai.store(inherit=False)
i = self.lib.items()[0]
- self.assertNotEqual(i.artist, 'myNewArtist')
+ self.assertNotEqual(i.artist, "myNewArtist")
def test_albuminfo_remove_removes_items(self):
item_id = self.i.id
self.lib.get_album(self.i).remove()
c = self.lib._connection().execute(
- 'SELECT id FROM items WHERE id=?', (item_id,)
+ "SELECT id FROM items WHERE id=?", (item_id,)
)
self.assertEqual(c.fetchone(), None)
@@ -1051,7 +1055,7 @@ def test_removing_last_item_removes_album(self):
def test_noop_albuminfo_changes_affect_items(self):
i = self.lib.items()[0]
- i.album = 'foobar'
+ i.album = "foobar"
i.store()
ai = self.lib.get_album(self.i)
ai.album = ai.album
@@ -1063,35 +1067,35 @@ def test_noop_albuminfo_changes_affect_items(self):
class ArtDestinationTest(_common.TestCase):
def setUp(self):
super().setUp()
- config['art_filename'] = 'artimage'
- config['replace'] = {'X': 'Y'}
+ config["art_filename"] = "artimage"
+ config["replace"] = {"X": "Y"}
self.lib = beets.library.Library(
- ':memory:', replacements=[(re.compile('X'), 'Y')]
+ ":memory:", replacements=[(re.compile("X"), "Y")]
)
self.i = item(self.lib)
self.i.path = self.i.destination()
self.ai = self.lib.add_album((self.i,))
def test_art_filename_respects_setting(self):
- art = self.ai.art_destination('something.jpg')
- new_art = bytestring_path('%sartimage.jpg' % os.path.sep)
+ art = self.ai.art_destination("something.jpg")
+ new_art = bytestring_path("%sartimage.jpg" % os.path.sep)
self.assertTrue(new_art in art)
def test_art_path_in_item_dir(self):
- art = self.ai.art_destination('something.jpg')
+ art = self.ai.art_destination("something.jpg")
track = self.i.destination()
self.assertEqual(os.path.dirname(art), os.path.dirname(track))
def test_art_path_sanitized(self):
- config['art_filename'] = 'artXimage'
- art = self.ai.art_destination('something.jpg')
- self.assertTrue(b'artYimage' in art)
+ config["art_filename"] = "artXimage"
+ art = self.ai.art_destination("something.jpg")
+ self.assertTrue(b"artYimage" in art)
class PathStringTest(_common.TestCase):
def setUp(self):
super().setUp()
- self.lib = beets.library.Library(':memory:')
+ self.lib = beets.library.Library(":memory:")
self.i = item(self.lib)
def test_item_path_is_bytestring(self):
@@ -1102,18 +1106,21 @@ def test_fetched_item_path_is_bytestring(self):
self.assertTrue(isinstance(i.path, bytes))
def test_unicode_path_becomes_bytestring(self):
- self.i.path = 'unicodepath'
+ self.i.path = "unicodepath"
self.assertTrue(isinstance(self.i.path, bytes))
def test_unicode_in_database_becomes_bytestring(self):
- self.lib._connection().execute("""
+ self.lib._connection().execute(
+ """
update items set path=? where id=?
- """, (self.i.id, 'somepath'))
+ """,
+ (self.i.id, "somepath"),
+ )
i = list(self.lib.items())[0]
self.assertTrue(isinstance(i.path, bytes))
def test_special_chars_preserved_in_database(self):
- path = 'b\xe1r'.encode()
+ path = "b\xe1r".encode()
self.i.path = path
self.i.store()
i = list(self.lib.items())[0]
@@ -1121,7 +1128,7 @@ def test_special_chars_preserved_in_database(self):
def test_special_char_path_added_to_database(self):
self.i.remove()
- path = 'b\xe1r'.encode()
+ path = "b\xe1r".encode()
i = item()
i.path = path
self.lib.add(i)
@@ -1129,18 +1136,18 @@ def test_special_char_path_added_to_database(self):
self.assertEqual(i.path, path)
def test_destination_returns_bytestring(self):
- self.i.artist = 'b\xe1r'
+ self.i.artist = "b\xe1r"
dest = self.i.destination()
self.assertTrue(isinstance(dest, bytes))
def test_art_destination_returns_bytestring(self):
- self.i.artist = 'b\xe1r'
+ self.i.artist = "b\xe1r"
alb = self.lib.add_album([self.i])
- dest = alb.art_destination('image.jpg')
+ dest = alb.art_destination("image.jpg")
self.assertTrue(isinstance(dest, bytes))
def test_artpath_stores_special_chars(self):
- path = b'b\xe1r'
+ path = b"b\xe1r"
alb = self.lib.add_album([self.i])
alb.artpath = path
alb.store()
@@ -1148,25 +1155,24 @@ def test_artpath_stores_special_chars(self):
self.assertEqual(path, alb.artpath)
def test_sanitize_path_with_special_chars(self):
- path = 'b\xe1r?'
+ path = "b\xe1r?"
new_path = util.sanitize_path(path)
- self.assertTrue(new_path.startswith('b\xe1r'))
+ self.assertTrue(new_path.startswith("b\xe1r"))
def test_sanitize_path_returns_unicode(self):
- path = 'b\xe1r?'
+ path = "b\xe1r?"
new_path = util.sanitize_path(path)
self.assertTrue(isinstance(new_path, str))
def test_unicode_artpath_becomes_bytestring(self):
alb = self.lib.add_album([self.i])
- alb.artpath = 'somep\xe1th'
+ alb.artpath = "somep\xe1th"
self.assertTrue(isinstance(alb.artpath, bytes))
def test_unicode_artpath_in_database_decoded(self):
alb = self.lib.add_album([self.i])
self.lib._connection().execute(
- "update albums set artpath=? where id=?",
- ('somep\xe1th', alb.id)
+ "update albums set artpath=? where id=?", ("somep\xe1th", alb.id)
)
alb = self.lib.get_album(alb.id)
self.assertTrue(isinstance(alb.artpath, bytes))
@@ -1175,13 +1181,13 @@ def test_unicode_artpath_in_database_decoded(self):
class MtimeTest(_common.TestCase):
def setUp(self):
super().setUp()
- self.ipath = os.path.join(self.temp_dir, b'testfile.mp3')
+ self.ipath = os.path.join(self.temp_dir, b"testfile.mp3")
shutil.copy(
- syspath(os.path.join(_common.RSRC, b'full.mp3')),
+ syspath(os.path.join(_common.RSRC, b"full.mp3")),
syspath(self.ipath),
)
self.i = beets.library.Item.from_path(self.ipath)
- self.lib = beets.library.Library(':memory:')
+ self.lib = beets.library.Library(":memory:")
self.lib.add(self.i)
def tearDown(self):
@@ -1196,16 +1202,16 @@ def test_mtime_initially_up_to_date(self):
self.assertGreaterEqual(self.i.mtime, self._mtime())
def test_mtime_reset_on_db_modify(self):
- self.i.title = 'something else'
+ self.i.title = "something else"
self.assertLess(self.i.mtime, self._mtime())
def test_mtime_up_to_date_after_write(self):
- self.i.title = 'something else'
+ self.i.title = "something else"
self.i.write()
self.assertGreaterEqual(self.i.mtime, self._mtime())
def test_mtime_up_to_date_after_read(self):
- self.i.title = 'something else'
+ self.i.title = "something else"
self.i.read()
self.assertGreaterEqual(self.i.mtime, self._mtime())
@@ -1213,7 +1219,7 @@ def test_mtime_up_to_date_after_read(self):
class ImportTimeTest(_common.TestCase):
def setUp(self):
super().setUp()
- self.lib = beets.library.Library(':memory:')
+ self.lib = beets.library.Library(":memory:")
def added(self):
self.track = item()
@@ -1230,36 +1236,35 @@ class TemplateTest(_common.LibTestCase):
def test_year_formatted_in_template(self):
self.i.year = 123
self.i.store()
- self.assertEqual(self.i.evaluate_template('$year'), '0123')
+ self.assertEqual(self.i.evaluate_template("$year"), "0123")
def test_album_flexattr_appears_in_item_template(self):
self.album = self.lib.add_album([self.i])
- self.album.foo = 'baz'
+ self.album.foo = "baz"
self.album.store()
- self.assertEqual(self.i.evaluate_template('$foo'), 'baz')
+ self.assertEqual(self.i.evaluate_template("$foo"), "baz")
def test_album_and_item_format(self):
- config['format_album'] = 'foö $foo'
+ config["format_album"] = "foö $foo"
album = beets.library.Album()
- album.foo = 'bar'
- album.tagada = 'togodo'
+ album.foo = "bar"
+ album.tagada = "togodo"
self.assertEqual(f"{album}", "foö bar")
self.assertEqual(f"{album:$tagada}", "togodo")
self.assertEqual(str(album), "foö bar")
self.assertEqual(bytes(album), b"fo\xc3\xb6 bar")
- config['format_item'] = 'bar $foo'
+ config["format_item"] = "bar $foo"
item = beets.library.Item()
- item.foo = 'bar'
- item.tagada = 'togodo'
+ item.foo = "bar"
+ item.tagada = "togodo"
self.assertEqual(f"{item}", "bar bar")
self.assertEqual(f"{item:$tagada}", "togodo")
class UnicodePathTest(_common.LibTestCase):
def test_unicode_path(self):
- self.i.path = os.path.join(_common.RSRC,
- 'unicode\u2019d.mp3'.encode())
+ self.i.path = os.path.join(_common.RSRC, "unicode\u2019d.mp3".encode())
# If there are any problems with unicode paths, we will raise
# here and fail.
self.i.read()
@@ -1275,7 +1280,7 @@ def tearDown(self):
def test_write_nonexistant(self):
item = self.create_item()
- item.path = b'/path/does/not/exist'
+ item.path = b"/path/does/not/exist"
with self.assertRaises(beets.library.ReadError):
item.write()
@@ -1293,69 +1298,65 @@ def test_no_write_permission(self):
def test_write_with_custom_path(self):
item = self.add_item_fixture()
- custom_path = os.path.join(self.temp_dir, b'custom.mp3')
+ custom_path = os.path.join(self.temp_dir, b"custom.mp3")
shutil.copy(syspath(item.path), syspath(custom_path))
- item['artist'] = 'new artist'
- self.assertNotEqual(MediaFile(syspath(custom_path)).artist,
- 'new artist')
- self.assertNotEqual(MediaFile(syspath(item.path)).artist,
- 'new artist')
+ item["artist"] = "new artist"
+ self.assertNotEqual(
+ MediaFile(syspath(custom_path)).artist, "new artist"
+ )
+ self.assertNotEqual(MediaFile(syspath(item.path)).artist, "new artist")
item.write(custom_path)
- self.assertEqual(MediaFile(syspath(custom_path)).artist, 'new artist')
- self.assertNotEqual(MediaFile(syspath(item.path)).artist, 'new artist')
+ self.assertEqual(MediaFile(syspath(custom_path)).artist, "new artist")
+ self.assertNotEqual(MediaFile(syspath(item.path)).artist, "new artist")
def test_write_custom_tags(self):
- item = self.add_item_fixture(artist='old artist')
- item.write(tags={'artist': 'new artist'})
- self.assertNotEqual(item.artist, 'new artist')
- self.assertEqual(MediaFile(syspath(item.path)).artist, 'new artist')
+ item = self.add_item_fixture(artist="old artist")
+ item.write(tags={"artist": "new artist"})
+ self.assertNotEqual(item.artist, "new artist")
+ self.assertEqual(MediaFile(syspath(item.path)).artist, "new artist")
def test_write_multi_tags(self):
- item = self.add_item_fixture(artist='old artist')
- item.write(tags={'artists': ['old artist', 'another artist']})
+ item = self.add_item_fixture(artist="old artist")
+ item.write(tags={"artists": ["old artist", "another artist"]})
self.assertEqual(
MediaFile(syspath(item.path)).artists,
- ['old artist', 'another artist']
+ ["old artist", "another artist"],
)
def test_write_multi_tags_id3v23(self):
- item = self.add_item_fixture(artist='old artist')
+ item = self.add_item_fixture(artist="old artist")
item.write(
- tags={'artists': ['old artist', 'another artist']},
- id3v23=True
+ tags={"artists": ["old artist", "another artist"]}, id3v23=True
)
self.assertEqual(
- MediaFile(syspath(item.path)).artists,
- ['old artist/another artist']
+ MediaFile(syspath(item.path)).artists, ["old artist/another artist"]
)
def test_write_date_field(self):
# Since `date` is not a MediaField, this should do nothing.
item = self.add_item_fixture()
clean_year = item.year
- item.date = 'foo'
+ item.date = "foo"
item.write()
self.assertEqual(MediaFile(syspath(item.path)).year, clean_year)
class ItemReadTest(unittest.TestCase):
-
def test_unreadable_raise_read_error(self):
- unreadable = os.path.join(_common.RSRC, b'image-2x3.png')
+ unreadable = os.path.join(_common.RSRC, b"image-2x3.png")
item = beets.library.Item()
with self.assertRaises(beets.library.ReadError) as cm:
item.read(unreadable)
- self.assertIsInstance(cm.exception.reason,
- UnreadableFileError)
+ self.assertIsInstance(cm.exception.reason, UnreadableFileError)
def test_nonexistent_raise_read_error(self):
item = beets.library.Item()
with self.assertRaises(beets.library.ReadError):
- item.read('/thisfiledoesnotexist')
+ item.read("/thisfiledoesnotexist")
class FilesizeTest(unittest.TestCase, TestHelper):
@@ -1378,8 +1379,7 @@ class ParseQueryTest(unittest.TestCase):
def test_parse_invalid_query_string(self):
with self.assertRaises(beets.dbcore.InvalidQueryError) as raised:
beets.library.parse_query_string('foo"', None)
- self.assertIsInstance(raised.exception,
- beets.dbcore.query.ParsingError)
+ self.assertIsInstance(raised.exception, beets.dbcore.query.ParsingError)
def test_parse_bytes(self):
with self.assertRaises(AssertionError):
@@ -1388,54 +1388,53 @@ def test_parse_bytes(self):
class LibraryFieldTypesTest(unittest.TestCase):
"""Test format() and parse() for library-specific field types"""
+
def test_datetype(self):
t = beets.library.DateType()
# format
- time_format = beets.config['time_format'].as_str()
- time_local = time.strftime(time_format,
- time.localtime(123456789))
+ time_format = beets.config["time_format"].as_str()
+ time_local = time.strftime(time_format, time.localtime(123456789))
self.assertEqual(time_local, t.format(123456789))
# parse
self.assertEqual(123456789.0, t.parse(time_local))
- self.assertEqual(123456789.0, t.parse('123456789.0'))
- self.assertEqual(t.null, t.parse('not123456789.0'))
- self.assertEqual(t.null, t.parse('1973-11-29'))
+ self.assertEqual(123456789.0, t.parse("123456789.0"))
+ self.assertEqual(t.null, t.parse("not123456789.0"))
+ self.assertEqual(t.null, t.parse("1973-11-29"))
def test_pathtype(self):
t = beets.library.PathType()
# format
- self.assertEqual('/tmp', t.format('/tmp'))
- self.assertEqual('/tmp/\xe4lbum', t.format('/tmp/\u00e4lbum'))
+ self.assertEqual("/tmp", t.format("/tmp"))
+ self.assertEqual("/tmp/\xe4lbum", t.format("/tmp/\u00e4lbum"))
# parse
- self.assertEqual(np(b'/tmp'), t.parse('/tmp'))
- self.assertEqual(np(b'/tmp/\xc3\xa4lbum'),
- t.parse('/tmp/\u00e4lbum/'))
+ self.assertEqual(np(b"/tmp"), t.parse("/tmp"))
+ self.assertEqual(np(b"/tmp/\xc3\xa4lbum"), t.parse("/tmp/\u00e4lbum/"))
def test_musicalkey(self):
t = beets.library.MusicalKey()
# parse
- self.assertEqual('C#m', t.parse('c#m'))
- self.assertEqual('Gm', t.parse('g minor'))
- self.assertEqual('Not c#m', t.parse('not C#m'))
+ self.assertEqual("C#m", t.parse("c#m"))
+ self.assertEqual("Gm", t.parse("g minor"))
+ self.assertEqual("Not c#m", t.parse("not C#m"))
def test_durationtype(self):
t = beets.library.DurationType()
# format
- self.assertEqual('1:01', t.format(61.23))
- self.assertEqual('60:01', t.format(3601.23))
- self.assertEqual('0:00', t.format(None))
+ self.assertEqual("1:01", t.format(61.23))
+ self.assertEqual("60:01", t.format(3601.23))
+ self.assertEqual("0:00", t.format(None))
# parse
- self.assertEqual(61.0, t.parse('1:01'))
- self.assertEqual(61.23, t.parse('61.23'))
- self.assertEqual(3601.0, t.parse('60:01'))
- self.assertEqual(t.null, t.parse('1:00:01'))
- self.assertEqual(t.null, t.parse('not61.23'))
+ self.assertEqual(61.0, t.parse("1:01"))
+ self.assertEqual(61.23, t.parse("61.23"))
+ self.assertEqual(3601.0, t.parse("60:01"))
+ self.assertEqual(t.null, t.parse("1:00:01"))
+ self.assertEqual(t.null, t.parse("not61.23"))
# config format_raw_length
- beets.config['format_raw_length'] = True
+ beets.config["format_raw_length"] = True
self.assertEqual(61.23, t.format(61.23))
self.assertEqual(3601.23, t.format(3601.23))
@@ -1444,5 +1443,5 @@ def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/test_logging.py b/test/test_logging.py
index 79ff5cae20..5e019786b6 100644
--- a/test/test_logging.py
+++ b/test/test_logging.py
@@ -1,17 +1,16 @@
"""Stupid tests that ensure logging works as expected"""
+import logging as log
import sys
import threading
-import logging as log
-from io import StringIO
import unittest
+from io import StringIO
+from test import _common, helper
+from test._common import TestCase
import beets.logging as blog
-from beets import plugins, ui
import beetsplug
-from test import _common
-from test._common import TestCase
-from test import helper
+from beets import plugins, ui
class LoggingTest(TestCase):
@@ -25,8 +24,9 @@ def test_logging_management(self):
l4 = log.getLogger("bar123")
self.assertEqual(l3, l4)
self.assertEqual(l3.__class__, blog.BeetsLogger)
- self.assertIsInstance(l3, (blog.StrFormatLogger,
- blog.ThreadLocalLevelLogger))
+ self.assertIsInstance(
+ l3, (blog.StrFormatLogger, blog.ThreadLocalLevelLogger)
+ )
l5 = l3.getChild("shalala")
self.assertEqual(l5.__class__, blog.BeetsLogger)
@@ -51,114 +51,114 @@ class LoggingLevelTest(unittest.TestCase, helper.TestHelper):
class DummyModule:
class DummyPlugin(plugins.BeetsPlugin):
def __init__(self):
- plugins.BeetsPlugin.__init__(self, 'dummy')
+ plugins.BeetsPlugin.__init__(self, "dummy")
self.import_stages = [self.import_stage]
- self.register_listener('dummy_event', self.listener)
+ self.register_listener("dummy_event", self.listener)
def log_all(self, name):
- self._log.debug('debug ' + name)
- self._log.info('info ' + name)
- self._log.warning('warning ' + name)
+ self._log.debug("debug " + name)
+ self._log.info("info " + name)
+ self._log.warning("warning " + name)
def commands(self):
- cmd = ui.Subcommand('dummy')
- cmd.func = lambda _, __, ___: self.log_all('cmd')
+ cmd = ui.Subcommand("dummy")
+ cmd.func = lambda _, __, ___: self.log_all("cmd")
return (cmd,)
def import_stage(self, session, task):
- self.log_all('import_stage')
+ self.log_all("import_stage")
def listener(self):
- self.log_all('listener')
+ self.log_all("listener")
def setUp(self):
- sys.modules['beetsplug.dummy'] = self.DummyModule
+ sys.modules["beetsplug.dummy"] = self.DummyModule
beetsplug.dummy = self.DummyModule
self.setup_beets()
- self.load_plugins('dummy')
+ self.load_plugins("dummy")
def tearDown(self):
self.unload_plugins()
self.teardown_beets()
del beetsplug.dummy
- sys.modules.pop('beetsplug.dummy')
+ sys.modules.pop("beetsplug.dummy")
self.DummyModule.DummyPlugin.listeners = None
self.DummyModule.DummyPlugin._raw_listeners = None
def test_command_level0(self):
- self.config['verbose'] = 0
+ self.config["verbose"] = 0
with helper.capture_log() as logs:
- self.run_command('dummy')
- self.assertIn('dummy: warning cmd', logs)
- self.assertIn('dummy: info cmd', logs)
- self.assertNotIn('dummy: debug cmd', logs)
+ self.run_command("dummy")
+ self.assertIn("dummy: warning cmd", logs)
+ self.assertIn("dummy: info cmd", logs)
+ self.assertNotIn("dummy: debug cmd", logs)
def test_command_level1(self):
- self.config['verbose'] = 1
+ self.config["verbose"] = 1
with helper.capture_log() as logs:
- self.run_command('dummy')
- self.assertIn('dummy: warning cmd', logs)
- self.assertIn('dummy: info cmd', logs)
- self.assertIn('dummy: debug cmd', logs)
+ self.run_command("dummy")
+ self.assertIn("dummy: warning cmd", logs)
+ self.assertIn("dummy: info cmd", logs)
+ self.assertIn("dummy: debug cmd", logs)
def test_command_level2(self):
- self.config['verbose'] = 2
+ self.config["verbose"] = 2
with helper.capture_log() as logs:
- self.run_command('dummy')
- self.assertIn('dummy: warning cmd', logs)
- self.assertIn('dummy: info cmd', logs)
- self.assertIn('dummy: debug cmd', logs)
+ self.run_command("dummy")
+ self.assertIn("dummy: warning cmd", logs)
+ self.assertIn("dummy: info cmd", logs)
+ self.assertIn("dummy: debug cmd", logs)
def test_listener_level0(self):
- self.config['verbose'] = 0
+ self.config["verbose"] = 0
with helper.capture_log() as logs:
- plugins.send('dummy_event')
- self.assertIn('dummy: warning listener', logs)
- self.assertNotIn('dummy: info listener', logs)
- self.assertNotIn('dummy: debug listener', logs)
+ plugins.send("dummy_event")
+ self.assertIn("dummy: warning listener", logs)
+ self.assertNotIn("dummy: info listener", logs)
+ self.assertNotIn("dummy: debug listener", logs)
def test_listener_level1(self):
- self.config['verbose'] = 1
+ self.config["verbose"] = 1
with helper.capture_log() as logs:
- plugins.send('dummy_event')
- self.assertIn('dummy: warning listener', logs)
- self.assertIn('dummy: info listener', logs)
- self.assertNotIn('dummy: debug listener', logs)
+ plugins.send("dummy_event")
+ self.assertIn("dummy: warning listener", logs)
+ self.assertIn("dummy: info listener", logs)
+ self.assertNotIn("dummy: debug listener", logs)
def test_listener_level2(self):
- self.config['verbose'] = 2
+ self.config["verbose"] = 2
with helper.capture_log() as logs:
- plugins.send('dummy_event')
- self.assertIn('dummy: warning listener', logs)
- self.assertIn('dummy: info listener', logs)
- self.assertIn('dummy: debug listener', logs)
+ plugins.send("dummy_event")
+ self.assertIn("dummy: warning listener", logs)
+ self.assertIn("dummy: info listener", logs)
+ self.assertIn("dummy: debug listener", logs)
def test_import_stage_level0(self):
- self.config['verbose'] = 0
+ self.config["verbose"] = 0
with helper.capture_log() as logs:
importer = self.create_importer()
importer.run()
- self.assertIn('dummy: warning import_stage', logs)
- self.assertNotIn('dummy: info import_stage', logs)
- self.assertNotIn('dummy: debug import_stage', logs)
+ self.assertIn("dummy: warning import_stage", logs)
+ self.assertNotIn("dummy: info import_stage", logs)
+ self.assertNotIn("dummy: debug import_stage", logs)
def test_import_stage_level1(self):
- self.config['verbose'] = 1
+ self.config["verbose"] = 1
with helper.capture_log() as logs:
importer = self.create_importer()
importer.run()
- self.assertIn('dummy: warning import_stage', logs)
- self.assertIn('dummy: info import_stage', logs)
- self.assertNotIn('dummy: debug import_stage', logs)
+ self.assertIn("dummy: warning import_stage", logs)
+ self.assertIn("dummy: info import_stage", logs)
+ self.assertNotIn("dummy: debug import_stage", logs)
def test_import_stage_level2(self):
- self.config['verbose'] = 2
+ self.config["verbose"] = 2
with helper.capture_log() as logs:
importer = self.create_importer()
importer.run()
- self.assertIn('dummy: warning import_stage', logs)
- self.assertIn('dummy: info import_stage', logs)
- self.assertIn('dummy: debug import_stage', logs)
+ self.assertIn("dummy: warning import_stage", logs)
+ self.assertIn("dummy: info import_stage", logs)
+ self.assertIn("dummy: debug import_stage", logs)
@_common.slow_test()
@@ -167,11 +167,12 @@ class ConcurrentEventsTest(TestCase, helper.TestHelper):
events interaction. Since this is a bit heavy we don't do it in
LoggingLevelTest.
"""
+
class DummyPlugin(plugins.BeetsPlugin):
def __init__(self, test_case):
- plugins.BeetsPlugin.__init__(self, 'dummy')
- self.register_listener('dummy_event1', self.listener1)
- self.register_listener('dummy_event2', self.listener2)
+ plugins.BeetsPlugin.__init__(self, "dummy")
+ self.register_listener("dummy_event1", self.listener1)
+ self.register_listener("dummy_event2", self.listener2)
self.lock1 = threading.Lock()
self.lock2 = threading.Lock()
self.test_case = test_case
@@ -179,9 +180,9 @@ def __init__(self, test_case):
self.t1_step = self.t2_step = 0
def log_all(self, name):
- self._log.debug('debug ' + name)
- self._log.info('info ' + name)
- self._log.warning('warning ' + name)
+ self._log.debug("debug " + name)
+ self._log.info("info " + name)
+ self._log.warning("warning " + name)
def listener1(self):
try:
@@ -221,16 +222,16 @@ def check_dp_exc():
dp.lock2.acquire()
self.assertEqual(dp._log.level, log.NOTSET)
- self.config['verbose'] = 1
- t1 = threading.Thread(target=dp.listeners['dummy_event1'][0])
+ self.config["verbose"] = 1
+ t1 = threading.Thread(target=dp.listeners["dummy_event1"][0])
t1.start() # blocked. t1 tested its log level
while dp.t1_step != 1:
check_dp_exc()
self.assertTrue(t1.is_alive())
self.assertEqual(dp._log.level, log.NOTSET)
- self.config['verbose'] = 2
- t2 = threading.Thread(target=dp.listeners['dummy_event2'][0])
+ self.config["verbose"] = 2
+ t2 = threading.Thread(target=dp.listeners["dummy_event2"][0])
t2.start() # blocked. t2 tested its log level
while dp.t2_step != 1:
check_dp_exc()
@@ -240,7 +241,7 @@ def check_dp_exc():
dp.lock1.release() # dummy_event1 tests its log level + finishes
while dp.t1_step != 2:
check_dp_exc()
- t1.join(.1)
+ t1.join(0.1)
self.assertFalse(t1.is_alive())
self.assertTrue(t2.is_alive())
self.assertEqual(dp._log.level, log.NOTSET)
@@ -248,7 +249,7 @@ def check_dp_exc():
dp.lock2.release() # dummy_event2 tests its log level + finishes
while dp.t2_step != 2:
check_dp_exc()
- t2.join(.1)
+ t2.join(0.1)
self.assertFalse(t2.is_alive())
except Exception:
@@ -263,17 +264,16 @@ def check_dp_exc():
raise
def test_root_logger_levels(self):
- """Root logger level should be shared between threads.
- """
- self.config['threaded'] = True
+ """Root logger level should be shared between threads."""
+ self.config["threaded"] = True
- blog.getLogger('beets').set_global_level(blog.WARNING)
+ blog.getLogger("beets").set_global_level(blog.WARNING)
with helper.capture_log() as logs:
importer = self.create_importer()
importer.run()
self.assertEqual(logs, [])
- blog.getLogger('beets').set_global_level(blog.INFO)
+ blog.getLogger("beets").set_global_level(blog.INFO)
with helper.capture_log() as logs:
importer = self.create_importer()
importer.run()
@@ -281,7 +281,7 @@ def test_root_logger_levels(self):
self.assertIn("import", l)
self.assertIn("album", l)
- blog.getLogger('beets').set_global_level(blog.DEBUG)
+ blog.getLogger("beets").set_global_level(blog.DEBUG)
with helper.capture_log() as logs:
importer = self.create_importer()
importer.run()
@@ -292,5 +292,5 @@ def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/test_m3ufile.py b/test/test_m3ufile.py
index 2c1284aabb..be0517a78c 100644
--- a/test/test_m3ufile.py
+++ b/test/test_m3ufile.py
@@ -14,23 +14,24 @@
"""Testsuite for the M3UFile class."""
+import sys
+import unittest
from os import path
-from tempfile import mkdtemp
from shutil import rmtree
-import unittest
-import sys
+from tempfile import mkdtemp
+from test._common import RSRC
from beets.util import bytestring_path
-from beets.util.m3u import M3UFile, EmptyPlaylistError
-from test._common import RSRC
+from beets.util.m3u import EmptyPlaylistError, M3UFile
class M3UFileTest(unittest.TestCase):
"""Tests the M3UFile class."""
+
def test_playlist_write_empty(self):
"""Test whether saving an empty playlist file raises an error."""
tempdir = bytestring_path(mkdtemp())
- the_playlist_file = path.join(tempdir, b'playlist.m3u8')
+ the_playlist_file = path.join(tempdir, b"playlist.m3u8")
m3ufile = M3UFile(the_playlist_file)
with self.assertRaises(EmptyPlaylistError):
m3ufile.write()
@@ -39,12 +40,14 @@ def test_playlist_write_empty(self):
def test_playlist_write(self):
"""Test saving ascii paths to a playlist file."""
tempdir = bytestring_path(mkdtemp())
- the_playlist_file = path.join(tempdir, b'playlist.m3u')
+ the_playlist_file = path.join(tempdir, b"playlist.m3u")
m3ufile = M3UFile(the_playlist_file)
- m3ufile.set_contents([
- bytestring_path('/This/is/a/path/to_a_file.mp3'),
- bytestring_path('/This/is/another/path/to_a_file.mp3')
- ])
+ m3ufile.set_contents(
+ [
+ bytestring_path("/This/is/a/path/to_a_file.mp3"),
+ bytestring_path("/This/is/another/path/to_a_file.mp3"),
+ ]
+ )
m3ufile.write()
self.assertTrue(path.exists(the_playlist_file))
rmtree(tempdir)
@@ -52,27 +55,32 @@ def test_playlist_write(self):
def test_playlist_write_unicode(self):
"""Test saving unicode paths to a playlist file."""
tempdir = bytestring_path(mkdtemp())
- the_playlist_file = path.join(tempdir, b'playlist.m3u8')
+ the_playlist_file = path.join(tempdir, b"playlist.m3u8")
m3ufile = M3UFile(the_playlist_file)
- m3ufile.set_contents([
- bytestring_path('/This/is/å/path/to_a_file.mp3'),
- bytestring_path('/This/is/another/path/tö_a_file.mp3')
- ])
+ m3ufile.set_contents(
+ [
+ bytestring_path("/This/is/å/path/to_a_file.mp3"),
+ bytestring_path("/This/is/another/path/tö_a_file.mp3"),
+ ]
+ )
m3ufile.write()
self.assertTrue(path.exists(the_playlist_file))
rmtree(tempdir)
- @unittest.skipUnless(sys.platform == 'win32', 'win32')
+ @unittest.skipUnless(sys.platform == "win32", "win32")
def test_playlist_write_and_read_unicode_windows(self):
"""Test saving unicode paths to a playlist file on Windows."""
tempdir = bytestring_path(mkdtemp())
- the_playlist_file = path.join(tempdir,
- b'playlist_write_and_read_windows.m3u8')
+ the_playlist_file = path.join(
+ tempdir, b"playlist_write_and_read_windows.m3u8"
+ )
m3ufile = M3UFile(the_playlist_file)
- m3ufile.set_contents([
- bytestring_path(r"x:\This\is\å\path\to_a_file.mp3"),
- bytestring_path(r"x:\This\is\another\path\tö_a_file.mp3")
- ])
+ m3ufile.set_contents(
+ [
+ bytestring_path(r"x:\This\is\å\path\to_a_file.mp3"),
+ bytestring_path(r"x:\This\is\another\path\tö_a_file.mp3"),
+ ]
+ )
m3ufile.write()
self.assertTrue(path.exists(the_playlist_file))
m3ufile_read = M3UFile(the_playlist_file)
@@ -80,57 +88,63 @@ def test_playlist_write_and_read_unicode_windows(self):
self.assertEqual(
m3ufile.media_list[0],
bytestring_path(
- path.join('x:\\', 'This', 'is', 'å', 'path', 'to_a_file.mp3'))
+ path.join("x:\\", "This", "is", "å", "path", "to_a_file.mp3")
+ ),
)
self.assertEqual(
m3ufile.media_list[1],
bytestring_path(r"x:\This\is\another\path\tö_a_file.mp3"),
- bytestring_path(path.join(
- 'x:\\', 'This', 'is', 'another', 'path', 'tö_a_file.mp3'))
+ bytestring_path(
+ path.join(
+ "x:\\", "This", "is", "another", "path", "tö_a_file.mp3"
+ )
+ ),
)
rmtree(tempdir)
- @unittest.skipIf(sys.platform == 'win32', 'win32')
+ @unittest.skipIf(sys.platform == "win32", "win32")
def test_playlist_load_ascii(self):
"""Test loading ascii paths from a playlist file."""
- the_playlist_file = path.join(RSRC, b'playlist.m3u')
+ the_playlist_file = path.join(RSRC, b"playlist.m3u")
m3ufile = M3UFile(the_playlist_file)
m3ufile.load()
- self.assertEqual(m3ufile.media_list[0],
- bytestring_path('/This/is/a/path/to_a_file.mp3'))
+ self.assertEqual(
+ m3ufile.media_list[0],
+ bytestring_path("/This/is/a/path/to_a_file.mp3"),
+ )
- @unittest.skipIf(sys.platform == 'win32', 'win32')
+ @unittest.skipIf(sys.platform == "win32", "win32")
def test_playlist_load_unicode(self):
"""Test loading unicode paths from a playlist file."""
- the_playlist_file = path.join(RSRC, b'playlist.m3u8')
+ the_playlist_file = path.join(RSRC, b"playlist.m3u8")
m3ufile = M3UFile(the_playlist_file)
m3ufile.load()
- self.assertEqual(m3ufile.media_list[0],
- bytestring_path('/This/is/å/path/to_a_file.mp3'))
+ self.assertEqual(
+ m3ufile.media_list[0],
+ bytestring_path("/This/is/å/path/to_a_file.mp3"),
+ )
- @unittest.skipUnless(sys.platform == 'win32', 'win32')
+ @unittest.skipUnless(sys.platform == "win32", "win32")
def test_playlist_load_unicode_windows(self):
"""Test loading unicode paths from a playlist file."""
- the_playlist_file = path.join(RSRC, b'playlist_windows.m3u8')
- winpath = bytestring_path(path.join(
- 'x:\\', 'This', 'is', 'å', 'path', 'to_a_file.mp3'))
+ the_playlist_file = path.join(RSRC, b"playlist_windows.m3u8")
+ winpath = bytestring_path(
+ path.join("x:\\", "This", "is", "å", "path", "to_a_file.mp3")
+ )
m3ufile = M3UFile(the_playlist_file)
m3ufile.load()
- self.assertEqual(
- m3ufile.media_list[0],
- winpath
- )
+ self.assertEqual(m3ufile.media_list[0], winpath)
def test_playlist_load_extm3u(self):
"""Test loading a playlist with an #EXTM3U header."""
- the_playlist_file = path.join(RSRC, b'playlist.m3u')
+ the_playlist_file = path.join(RSRC, b"playlist.m3u")
m3ufile = M3UFile(the_playlist_file)
m3ufile.load()
self.assertTrue(m3ufile.extm3u)
def test_playlist_load_non_extm3u(self):
"""Test loading a playlist without an #EXTM3U header."""
- the_playlist_file = path.join(RSRC, b'playlist_non_ext.m3u')
+ the_playlist_file = path.join(RSRC, b"playlist_non_ext.m3u")
m3ufile = M3UFile(the_playlist_file)
m3ufile.load()
self.assertFalse(m3ufile.extm3u)
@@ -141,5 +155,5 @@ def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/test_mb.py b/test/test_mb.py
index 9e6ac906a7..71a0294a2e 100644
--- a/test/test_mb.py
+++ b/test/test_mb.py
@@ -15,70 +15,74 @@
"""Tests for MusicBrainz API wrapper.
"""
-from test import _common
-from beets.autotag import mb
-from beets import config
-
import unittest
+from test import _common
from unittest import mock
+from beets import config
+from beets.autotag import mb
+
class MBAlbumInfoTest(_common.TestCase):
def _make_release(
self,
- date_str='2009',
+ date_str="2009",
tracks=None,
track_length=None,
track_artist=False,
multi_artist_credit=False,
data_tracks=None,
- medium_format='FORMAT'
+ medium_format="FORMAT",
):
release = {
- 'title': 'ALBUM TITLE',
- 'id': 'ALBUM ID',
- 'asin': 'ALBUM ASIN',
- 'disambiguation': 'R_DISAMBIGUATION',
- 'release-group': {
- 'type': 'Album',
- 'first-release-date': date_str,
- 'id': 'RELEASE GROUP ID',
- 'disambiguation': 'RG_DISAMBIGUATION',
+ "title": "ALBUM TITLE",
+ "id": "ALBUM ID",
+ "asin": "ALBUM ASIN",
+ "disambiguation": "R_DISAMBIGUATION",
+ "release-group": {
+ "type": "Album",
+ "first-release-date": date_str,
+ "id": "RELEASE GROUP ID",
+ "disambiguation": "RG_DISAMBIGUATION",
},
- 'artist-credit': [
+ "artist-credit": [
{
- 'artist': {
- 'name': 'ARTIST NAME',
- 'id': 'ARTIST ID',
- 'sort-name': 'ARTIST SORT NAME',
+ "artist": {
+ "name": "ARTIST NAME",
+ "id": "ARTIST ID",
+ "sort-name": "ARTIST SORT NAME",
},
- 'name': 'ARTIST CREDIT',
+ "name": "ARTIST CREDIT",
}
],
- 'date': '3001',
- 'medium-list': [],
- 'label-info-list': [{
- 'catalog-number': 'CATALOG NUMBER',
- 'label': {'name': 'LABEL NAME'},
- }],
- 'text-representation': {
- 'script': 'SCRIPT',
- 'language': 'LANGUAGE',
+ "date": "3001",
+ "medium-list": [],
+ "label-info-list": [
+ {
+ "catalog-number": "CATALOG NUMBER",
+ "label": {"name": "LABEL NAME"},
+ }
+ ],
+ "text-representation": {
+ "script": "SCRIPT",
+ "language": "LANGUAGE",
},
- 'country': 'COUNTRY',
- 'status': 'STATUS',
+ "country": "COUNTRY",
+ "status": "STATUS",
}
if multi_artist_credit:
- release['artist-credit'].append(" & ") # add join phase
- release['artist-credit'].append({
- 'artist': {
- 'name': 'ARTIST 2 NAME',
- 'id': 'ARTIST 2 ID',
- 'sort-name': 'ARTIST 2 SORT NAME',
- },
- 'name': 'ARTIST MULTI CREDIT',
- })
+ release["artist-credit"].append(" & ") # add join phrase
+ release["artist-credit"].append(
+ {
+ "artist": {
+ "name": "ARTIST 2 NAME",
+ "id": "ARTIST 2 ID",
+ "sort-name": "ARTIST 2 SORT NAME",
+ },
+ "name": "ARTIST MULTI CREDIT",
+ }
+ )
i = 0
track_list = []
@@ -86,38 +90,38 @@ def _make_release(
for recording in tracks:
i += 1
track = {
- 'id': 'RELEASE TRACK ID %d' % i,
- 'recording': recording,
- 'position': i,
- 'number': 'A1',
+ "id": "RELEASE TRACK ID %d" % i,
+ "recording": recording,
+ "position": i,
+ "number": "A1",
}
if track_length:
# Track lengths are distinct from recording lengths.
- track['length'] = track_length
+ track["length"] = track_length
if track_artist:
# Similarly, track artists can differ from recording
# artists.
- track['artist-credit'] = [
+ track["artist-credit"] = [
{
- 'artist': {
- 'name': 'TRACK ARTIST NAME',
- 'id': 'TRACK ARTIST ID',
- 'sort-name': 'TRACK ARTIST SORT NAME',
+ "artist": {
+ "name": "TRACK ARTIST NAME",
+ "id": "TRACK ARTIST ID",
+ "sort-name": "TRACK ARTIST SORT NAME",
},
- 'name': 'TRACK ARTIST CREDIT',
+ "name": "TRACK ARTIST CREDIT",
}
]
if multi_artist_credit:
- track['artist-credit'].append(" & ") # add join phase
- track['artist-credit'].append(
+ track["artist-credit"].append(" & ") # add join phrase
+ track["artist-credit"].append(
{
- 'artist': {
- 'name': 'TRACK ARTIST 2 NAME',
- 'id': 'TRACK ARTIST 2 ID',
- 'sort-name': 'TRACK ARTIST 2 SORT NAME',
+ "artist": {
+ "name": "TRACK ARTIST 2 NAME",
+ "id": "TRACK ARTIST 2 ID",
+ "sort-name": "TRACK ARTIST 2 SORT NAME",
},
- 'name': 'TRACK ARTIST 2 CREDIT',
+ "name": "TRACK ARTIST 2 CREDIT",
}
)
@@ -127,19 +131,21 @@ def _make_release(
for recording in data_tracks:
i += 1
data_track = {
- 'id': 'RELEASE TRACK ID %d' % i,
- 'recording': recording,
- 'position': i,
- 'number': 'A1',
+ "id": "RELEASE TRACK ID %d" % i,
+ "recording": recording,
+ "position": i,
+ "number": "A1",
}
data_track_list.append(data_track)
- release['medium-list'].append({
- 'position': '1',
- 'track-list': track_list,
- 'data-track-list': data_track_list,
- 'format': medium_format,
- 'title': 'MEDIUM TITLE',
- })
+ release["medium-list"].append(
+ {
+ "position": "1",
+ "track-list": track_list,
+ "data-track-list": data_track_list,
+ "format": medium_format,
+ "title": "MEDIUM TITLE",
+ }
+ )
return release
def _make_track(
@@ -151,100 +157,103 @@ def _make_track(
video=False,
disambiguation=None,
remixer=False,
- multi_artist_credit=False
+ multi_artist_credit=False,
):
track = {
- 'title': title,
- 'id': tr_id,
+ "title": title,
+ "id": tr_id,
}
if duration is not None:
- track['length'] = duration
+ track["length"] = duration
if artist:
- track['artist-credit'] = [
+ track["artist-credit"] = [
{
- 'artist': {
- 'name': 'RECORDING ARTIST NAME',
- 'id': 'RECORDING ARTIST ID',
- 'sort-name': 'RECORDING ARTIST SORT NAME',
+ "artist": {
+ "name": "RECORDING ARTIST NAME",
+ "id": "RECORDING ARTIST ID",
+ "sort-name": "RECORDING ARTIST SORT NAME",
},
- 'name': 'RECORDING ARTIST CREDIT',
+ "name": "RECORDING ARTIST CREDIT",
}
]
if multi_artist_credit:
- track['artist-credit'].append(" & ") # add join phase
- track['artist-credit'].append(
+ track["artist-credit"].append(" & ") # add join phrase
+ track["artist-credit"].append(
{
- 'artist': {
- 'name': 'RECORDING ARTIST 2 NAME',
- 'id': 'RECORDING ARTIST 2 ID',
- 'sort-name': 'RECORDING ARTIST 2 SORT NAME',
+ "artist": {
+ "name": "RECORDING ARTIST 2 NAME",
+ "id": "RECORDING ARTIST 2 ID",
+ "sort-name": "RECORDING ARTIST 2 SORT NAME",
},
- 'name': 'RECORDING ARTIST 2 CREDIT',
+ "name": "RECORDING ARTIST 2 CREDIT",
}
)
if remixer:
- track['artist-relation-list'] = [
+ track["artist-relation-list"] = [
{
- 'type': 'remixer',
- 'type-id': 'RELATION TYPE ID',
- 'target': 'RECORDING REMIXER ARTIST ID',
- 'direction': 'RECORDING RELATION DIRECTION',
- 'artist':
- {
- 'id': 'RECORDING REMIXER ARTIST ID',
- 'type': 'RECORDING REMIXER ARTIST TYPE',
- 'name': 'RECORDING REMIXER ARTIST NAME',
- 'sort-name': 'RECORDING REMIXER ARTIST SORT NAME'
- }
+ "type": "remixer",
+ "type-id": "RELATION TYPE ID",
+ "target": "RECORDING REMIXER ARTIST ID",
+ "direction": "RECORDING RELATION DIRECTION",
+ "artist": {
+ "id": "RECORDING REMIXER ARTIST ID",
+ "type": "RECORDING REMIXER ARTIST TYPE",
+ "name": "RECORDING REMIXER ARTIST NAME",
+ "sort-name": "RECORDING REMIXER ARTIST SORT NAME",
+ },
}
]
if video:
- track['video'] = 'true'
+ track["video"] = "true"
if disambiguation:
- track['disambiguation'] = disambiguation
+ track["disambiguation"] = disambiguation
return track
def test_parse_release_with_year(self):
- release = self._make_release('1984')
+ release = self._make_release("1984")
d = mb.album_info(release)
- self.assertEqual(d.album, 'ALBUM TITLE')
- self.assertEqual(d.album_id, 'ALBUM ID')
- self.assertEqual(d.artist, 'ARTIST NAME')
- self.assertEqual(d.artist_id, 'ARTIST ID')
+ self.assertEqual(d.album, "ALBUM TITLE")
+ self.assertEqual(d.album_id, "ALBUM ID")
+ self.assertEqual(d.artist, "ARTIST NAME")
+ self.assertEqual(d.artist_id, "ARTIST ID")
self.assertEqual(d.original_year, 1984)
self.assertEqual(d.year, 3001)
- self.assertEqual(d.artist_credit, 'ARTIST CREDIT')
+ self.assertEqual(d.artist_credit, "ARTIST CREDIT")
def test_parse_release_type(self):
- release = self._make_release('1984')
+ release = self._make_release("1984")
d = mb.album_info(release)
- self.assertEqual(d.albumtype, 'album')
+ self.assertEqual(d.albumtype, "album")
def test_parse_release_full_date(self):
- release = self._make_release('1987-03-31')
+ release = self._make_release("1987-03-31")
d = mb.album_info(release)
self.assertEqual(d.original_year, 1987)
self.assertEqual(d.original_month, 3)
self.assertEqual(d.original_day, 31)
def test_parse_tracks(self):
- tracks = [self._make_track('TITLE ONE', 'ID ONE', 100.0 * 1000.0),
- self._make_track('TITLE TWO', 'ID TWO', 200.0 * 1000.0)]
+ tracks = [
+ self._make_track("TITLE ONE", "ID ONE", 100.0 * 1000.0),
+ self._make_track("TITLE TWO", "ID TWO", 200.0 * 1000.0),
+ ]
release = self._make_release(tracks=tracks)
d = mb.album_info(release)
t = d.tracks
self.assertEqual(len(t), 2)
- self.assertEqual(t[0].title, 'TITLE ONE')
- self.assertEqual(t[0].track_id, 'ID ONE')
+ self.assertEqual(t[0].title, "TITLE ONE")
+ self.assertEqual(t[0].track_id, "ID ONE")
self.assertEqual(t[0].length, 100.0)
- self.assertEqual(t[1].title, 'TITLE TWO')
- self.assertEqual(t[1].track_id, 'ID TWO')
+ self.assertEqual(t[1].title, "TITLE TWO")
+ self.assertEqual(t[1].track_id, "ID TWO")
self.assertEqual(t[1].length, 200.0)
def test_parse_track_indices(self):
- tracks = [self._make_track('TITLE ONE', 'ID ONE', 100.0 * 1000.0),
- self._make_track('TITLE TWO', 'ID TWO', 200.0 * 1000.0)]
+ tracks = [
+ self._make_track("TITLE ONE", "ID ONE", 100.0 * 1000.0),
+ self._make_track("TITLE TWO", "ID TWO", 200.0 * 1000.0),
+ ]
release = self._make_release(tracks=tracks)
d = mb.album_info(release)
@@ -255,8 +264,10 @@ def test_parse_track_indices(self):
self.assertEqual(t[1].index, 2)
def test_parse_medium_numbers_single_medium(self):
- tracks = [self._make_track('TITLE ONE', 'ID ONE', 100.0 * 1000.0),
- self._make_track('TITLE TWO', 'ID TWO', 200.0 * 1000.0)]
+ tracks = [
+ self._make_track("TITLE ONE", "ID ONE", 100.0 * 1000.0),
+ self._make_track("TITLE TWO", "ID TWO", 200.0 * 1000.0),
+ ]
release = self._make_release(tracks=tracks)
d = mb.album_info(release)
@@ -266,19 +277,25 @@ def test_parse_medium_numbers_single_medium(self):
self.assertEqual(t[1].medium, 1)
def test_parse_medium_numbers_two_mediums(self):
- tracks = [self._make_track('TITLE ONE', 'ID ONE', 100.0 * 1000.0),
- self._make_track('TITLE TWO', 'ID TWO', 200.0 * 1000.0)]
+ tracks = [
+ self._make_track("TITLE ONE", "ID ONE", 100.0 * 1000.0),
+ self._make_track("TITLE TWO", "ID TWO", 200.0 * 1000.0),
+ ]
release = self._make_release(tracks=[tracks[0]])
- second_track_list = [{
- 'id': 'RELEASE TRACK ID 2',
- 'recording': tracks[1],
- 'position': '1',
- 'number': 'A1',
- }]
- release['medium-list'].append({
- 'position': '2',
- 'track-list': second_track_list,
- })
+ second_track_list = [
+ {
+ "id": "RELEASE TRACK ID 2",
+ "recording": tracks[1],
+ "position": "1",
+ "number": "A1",
+ }
+ ]
+ release["medium-list"].append(
+ {
+ "position": "2",
+ "track-list": second_track_list,
+ }
+ )
d = mb.album_info(release)
self.assertEqual(d.mediums, 2)
@@ -291,19 +308,19 @@ def test_parse_medium_numbers_two_mediums(self):
self.assertEqual(t[1].index, 2)
def test_parse_release_year_month_only(self):
- release = self._make_release('1987-03')
+ release = self._make_release("1987-03")
d = mb.album_info(release)
self.assertEqual(d.original_year, 1987)
self.assertEqual(d.original_month, 3)
def test_no_durations(self):
- tracks = [self._make_track('TITLE', 'ID', None)]
+ tracks = [self._make_track("TITLE", "ID", None)]
release = self._make_release(tracks=tracks)
d = mb.album_info(release)
self.assertEqual(d.tracks[0].length, None)
def test_track_length_overrides_recording_length(self):
- tracks = [self._make_track('TITLE', 'ID', 1.0 * 1000.0)]
+ tracks = [self._make_track("TITLE", "ID", 1.0 * 1000.0)]
release = self._make_release(tracks=tracks, track_length=2.0 * 1000.0)
d = mb.album_info(release)
self.assertEqual(d.tracks[0].length, 2.0)
@@ -322,298 +339,314 @@ def test_various_artists_defaults_false(self):
def test_detect_various_artists(self):
release = self._make_release(None)
- release['artist-credit'][0]['artist']['id'] = \
- mb.VARIOUS_ARTISTS_ID
+ release["artist-credit"][0]["artist"]["id"] = mb.VARIOUS_ARTISTS_ID
d = mb.album_info(release)
self.assertTrue(d.va)
def test_parse_artist_sort_name(self):
release = self._make_release(None)
d = mb.album_info(release)
- self.assertEqual(d.artist_sort, 'ARTIST SORT NAME')
+ self.assertEqual(d.artist_sort, "ARTIST SORT NAME")
def test_parse_releasegroupid(self):
release = self._make_release(None)
d = mb.album_info(release)
- self.assertEqual(d.releasegroup_id, 'RELEASE GROUP ID')
+ self.assertEqual(d.releasegroup_id, "RELEASE GROUP ID")
def test_parse_asin(self):
release = self._make_release(None)
d = mb.album_info(release)
- self.assertEqual(d.asin, 'ALBUM ASIN')
+ self.assertEqual(d.asin, "ALBUM ASIN")
def test_parse_catalognum(self):
release = self._make_release(None)
d = mb.album_info(release)
- self.assertEqual(d.catalognum, 'CATALOG NUMBER')
+ self.assertEqual(d.catalognum, "CATALOG NUMBER")
def test_parse_textrepr(self):
release = self._make_release(None)
d = mb.album_info(release)
- self.assertEqual(d.script, 'SCRIPT')
- self.assertEqual(d.language, 'LANGUAGE')
+ self.assertEqual(d.script, "SCRIPT")
+ self.assertEqual(d.language, "LANGUAGE")
def test_parse_country(self):
release = self._make_release(None)
d = mb.album_info(release)
- self.assertEqual(d.country, 'COUNTRY')
+ self.assertEqual(d.country, "COUNTRY")
def test_parse_status(self):
release = self._make_release(None)
d = mb.album_info(release)
- self.assertEqual(d.albumstatus, 'STATUS')
+ self.assertEqual(d.albumstatus, "STATUS")
def test_parse_media(self):
- tracks = [self._make_track('TITLE ONE', 'ID ONE', 100.0 * 1000.0),
- self._make_track('TITLE TWO', 'ID TWO', 200.0 * 1000.0)]
+ tracks = [
+ self._make_track("TITLE ONE", "ID ONE", 100.0 * 1000.0),
+ self._make_track("TITLE TWO", "ID TWO", 200.0 * 1000.0),
+ ]
release = self._make_release(None, tracks=tracks)
d = mb.album_info(release)
- self.assertEqual(d.media, 'FORMAT')
+ self.assertEqual(d.media, "FORMAT")
def test_parse_disambig(self):
release = self._make_release(None)
d = mb.album_info(release)
- self.assertEqual(d.albumdisambig, 'R_DISAMBIGUATION')
- self.assertEqual(d.releasegroupdisambig, 'RG_DISAMBIGUATION')
+ self.assertEqual(d.albumdisambig, "R_DISAMBIGUATION")
+ self.assertEqual(d.releasegroupdisambig, "RG_DISAMBIGUATION")
def test_parse_disctitle(self):
- tracks = [self._make_track('TITLE ONE', 'ID ONE', 100.0 * 1000.0),
- self._make_track('TITLE TWO', 'ID TWO', 200.0 * 1000.0)]
+ tracks = [
+ self._make_track("TITLE ONE", "ID ONE", 100.0 * 1000.0),
+ self._make_track("TITLE TWO", "ID TWO", 200.0 * 1000.0),
+ ]
release = self._make_release(None, tracks=tracks)
d = mb.album_info(release)
t = d.tracks
- self.assertEqual(t[0].disctitle, 'MEDIUM TITLE')
- self.assertEqual(t[1].disctitle, 'MEDIUM TITLE')
+ self.assertEqual(t[0].disctitle, "MEDIUM TITLE")
+ self.assertEqual(t[1].disctitle, "MEDIUM TITLE")
def test_missing_language(self):
release = self._make_release(None)
- del release['text-representation']['language']
+ del release["text-representation"]["language"]
d = mb.album_info(release)
self.assertEqual(d.language, None)
def test_parse_recording_artist(self):
- tracks = [self._make_track('a', 'b', 1, True)]
+ tracks = [self._make_track("a", "b", 1, True)]
release = self._make_release(None, tracks=tracks)
track = mb.album_info(release).tracks[0]
- self.assertEqual(track.artist, 'RECORDING ARTIST NAME')
- self.assertEqual(track.artist_id, 'RECORDING ARTIST ID')
- self.assertEqual(track.artist_sort, 'RECORDING ARTIST SORT NAME')
- self.assertEqual(track.artist_credit, 'RECORDING ARTIST CREDIT')
+ self.assertEqual(track.artist, "RECORDING ARTIST NAME")
+ self.assertEqual(track.artist_id, "RECORDING ARTIST ID")
+ self.assertEqual(track.artist_sort, "RECORDING ARTIST SORT NAME")
+ self.assertEqual(track.artist_credit, "RECORDING ARTIST CREDIT")
def test_parse_recording_artist_multi(self):
- tracks = [
- self._make_track(
- 'a', 'b', 1, True, multi_artist_credit=True
- )
- ]
+ tracks = [self._make_track("a", "b", 1, True, multi_artist_credit=True)]
release = self._make_release(None, tracks=tracks)
track = mb.album_info(release).tracks[0]
self.assertEqual(
- track.artist,
- 'RECORDING ARTIST NAME & RECORDING ARTIST 2 NAME'
- )
- self.assertEqual(
- track.artist_id, 'RECORDING ARTIST ID'
+ track.artist, "RECORDING ARTIST NAME & RECORDING ARTIST 2 NAME"
)
+ self.assertEqual(track.artist_id, "RECORDING ARTIST ID")
self.assertEqual(
track.artist_sort,
- 'RECORDING ARTIST SORT NAME & RECORDING ARTIST 2 SORT NAME'
+ "RECORDING ARTIST SORT NAME & RECORDING ARTIST 2 SORT NAME",
)
self.assertEqual(
track.artist_credit,
- 'RECORDING ARTIST CREDIT & RECORDING ARTIST 2 CREDIT'
+ "RECORDING ARTIST CREDIT & RECORDING ARTIST 2 CREDIT",
)
self.assertEqual(
- track.artists,
- ['RECORDING ARTIST NAME', 'RECORDING ARTIST 2 NAME']
+ track.artists, ["RECORDING ARTIST NAME", "RECORDING ARTIST 2 NAME"]
)
self.assertEqual(
- track.artists_ids, ['RECORDING ARTIST ID', 'RECORDING ARTIST 2 ID']
+ track.artists_ids, ["RECORDING ARTIST ID", "RECORDING ARTIST 2 ID"]
)
self.assertEqual(
track.artists_sort,
- [
- 'RECORDING ARTIST SORT NAME',
- 'RECORDING ARTIST 2 SORT NAME'
- ]
+ ["RECORDING ARTIST SORT NAME", "RECORDING ARTIST 2 SORT NAME"],
)
self.assertEqual(
track.artists_credit,
- [
- 'RECORDING ARTIST CREDIT',
- 'RECORDING ARTIST 2 CREDIT'
- ]
+ ["RECORDING ARTIST CREDIT", "RECORDING ARTIST 2 CREDIT"],
)
def test_track_artist_overrides_recording_artist(self):
- tracks = [self._make_track('a', 'b', 1, True)]
+ tracks = [self._make_track("a", "b", 1, True)]
release = self._make_release(None, tracks=tracks, track_artist=True)
track = mb.album_info(release).tracks[0]
- self.assertEqual(track.artist, 'TRACK ARTIST NAME')
- self.assertEqual(track.artist_id, 'TRACK ARTIST ID')
- self.assertEqual(track.artist_sort, 'TRACK ARTIST SORT NAME')
- self.assertEqual(track.artist_credit, 'TRACK ARTIST CREDIT')
+ self.assertEqual(track.artist, "TRACK ARTIST NAME")
+ self.assertEqual(track.artist_id, "TRACK ARTIST ID")
+ self.assertEqual(track.artist_sort, "TRACK ARTIST SORT NAME")
+ self.assertEqual(track.artist_credit, "TRACK ARTIST CREDIT")
def test_track_artist_overrides_recording_artist_multi(self):
- tracks = [
- self._make_track('a', 'b', 1, True, multi_artist_credit=True)
- ]
+ tracks = [self._make_track("a", "b", 1, True, multi_artist_credit=True)]
release = self._make_release(
- None,
- tracks=tracks,
- track_artist=True,
- multi_artist_credit=True
+ None, tracks=tracks, track_artist=True, multi_artist_credit=True
)
track = mb.album_info(release).tracks[0]
self.assertEqual(
- track.artist,
- 'TRACK ARTIST NAME & TRACK ARTIST 2 NAME'
+ track.artist, "TRACK ARTIST NAME & TRACK ARTIST 2 NAME"
)
- self.assertEqual(track.artist_id, 'TRACK ARTIST ID')
+ self.assertEqual(track.artist_id, "TRACK ARTIST ID")
self.assertEqual(
track.artist_sort,
- 'TRACK ARTIST SORT NAME & TRACK ARTIST 2 SORT NAME'
+ "TRACK ARTIST SORT NAME & TRACK ARTIST 2 SORT NAME",
)
self.assertEqual(
- track.artist_credit,
- 'TRACK ARTIST CREDIT & TRACK ARTIST 2 CREDIT'
+ track.artist_credit, "TRACK ARTIST CREDIT & TRACK ARTIST 2 CREDIT"
)
self.assertEqual(
- track.artists,
- ['TRACK ARTIST NAME', 'TRACK ARTIST 2 NAME']
+ track.artists, ["TRACK ARTIST NAME", "TRACK ARTIST 2 NAME"]
)
self.assertEqual(
- track.artists_ids,
- ['TRACK ARTIST ID', 'TRACK ARTIST 2 ID']
+ track.artists_ids, ["TRACK ARTIST ID", "TRACK ARTIST 2 ID"]
)
self.assertEqual(
track.artists_sort,
- ['TRACK ARTIST SORT NAME', 'TRACK ARTIST 2 SORT NAME']
+ ["TRACK ARTIST SORT NAME", "TRACK ARTIST 2 SORT NAME"],
)
self.assertEqual(
track.artists_credit,
- ['TRACK ARTIST CREDIT', 'TRACK ARTIST 2 CREDIT']
+ ["TRACK ARTIST CREDIT", "TRACK ARTIST 2 CREDIT"],
)
def test_parse_recording_remixer(self):
- tracks = [self._make_track('a', 'b', 1, remixer=True)]
+ tracks = [self._make_track("a", "b", 1, remixer=True)]
release = self._make_release(None, tracks=tracks)
track = mb.album_info(release).tracks[0]
- self.assertEqual(track.remixer, 'RECORDING REMIXER ARTIST NAME')
+ self.assertEqual(track.remixer, "RECORDING REMIXER ARTIST NAME")
def test_data_source(self):
release = self._make_release()
d = mb.album_info(release)
- self.assertEqual(d.data_source, 'MusicBrainz')
+ self.assertEqual(d.data_source, "MusicBrainz")
def test_ignored_media(self):
- config['match']['ignored_media'] = ['IGNORED1', 'IGNORED2']
- tracks = [self._make_track('TITLE ONE', 'ID ONE', 100.0 * 1000.0),
- self._make_track('TITLE TWO', 'ID TWO', 200.0 * 1000.0)]
+ config["match"]["ignored_media"] = ["IGNORED1", "IGNORED2"]
+ tracks = [
+ self._make_track("TITLE ONE", "ID ONE", 100.0 * 1000.0),
+ self._make_track("TITLE TWO", "ID TWO", 200.0 * 1000.0),
+ ]
release = self._make_release(tracks=tracks, medium_format="IGNORED1")
d = mb.album_info(release)
self.assertEqual(len(d.tracks), 0)
def test_no_ignored_media(self):
- config['match']['ignored_media'] = ['IGNORED1', 'IGNORED2']
- tracks = [self._make_track('TITLE ONE', 'ID ONE', 100.0 * 1000.0),
- self._make_track('TITLE TWO', 'ID TWO', 200.0 * 1000.0)]
- release = self._make_release(tracks=tracks,
- medium_format="NON-IGNORED")
+ config["match"]["ignored_media"] = ["IGNORED1", "IGNORED2"]
+ tracks = [
+ self._make_track("TITLE ONE", "ID ONE", 100.0 * 1000.0),
+ self._make_track("TITLE TWO", "ID TWO", 200.0 * 1000.0),
+ ]
+ release = self._make_release(tracks=tracks, medium_format="NON-IGNORED")
d = mb.album_info(release)
self.assertEqual(len(d.tracks), 2)
def test_skip_data_track(self):
- tracks = [self._make_track('TITLE ONE', 'ID ONE', 100.0 * 1000.0),
- self._make_track('[data track]', 'ID DATA TRACK',
- 100.0 * 1000.0),
- self._make_track('TITLE TWO', 'ID TWO', 200.0 * 1000.0)]
+ tracks = [
+ self._make_track("TITLE ONE", "ID ONE", 100.0 * 1000.0),
+ self._make_track("[data track]", "ID DATA TRACK", 100.0 * 1000.0),
+ self._make_track("TITLE TWO", "ID TWO", 200.0 * 1000.0),
+ ]
release = self._make_release(tracks=tracks)
d = mb.album_info(release)
self.assertEqual(len(d.tracks), 2)
- self.assertEqual(d.tracks[0].title, 'TITLE ONE')
- self.assertEqual(d.tracks[1].title, 'TITLE TWO')
+ self.assertEqual(d.tracks[0].title, "TITLE ONE")
+ self.assertEqual(d.tracks[1].title, "TITLE TWO")
def test_skip_audio_data_tracks_by_default(self):
- tracks = [self._make_track('TITLE ONE', 'ID ONE', 100.0 * 1000.0),
- self._make_track('TITLE TWO', 'ID TWO', 200.0 * 1000.0)]
- data_tracks = [self._make_track('TITLE AUDIO DATA', 'ID DATA TRACK',
- 100.0 * 1000.0)]
+ tracks = [
+ self._make_track("TITLE ONE", "ID ONE", 100.0 * 1000.0),
+ self._make_track("TITLE TWO", "ID TWO", 200.0 * 1000.0),
+ ]
+ data_tracks = [
+ self._make_track(
+ "TITLE AUDIO DATA", "ID DATA TRACK", 100.0 * 1000.0
+ )
+ ]
release = self._make_release(tracks=tracks, data_tracks=data_tracks)
d = mb.album_info(release)
self.assertEqual(len(d.tracks), 2)
- self.assertEqual(d.tracks[0].title, 'TITLE ONE')
- self.assertEqual(d.tracks[1].title, 'TITLE TWO')
+ self.assertEqual(d.tracks[0].title, "TITLE ONE")
+ self.assertEqual(d.tracks[1].title, "TITLE TWO")
def test_no_skip_audio_data_tracks_if_configured(self):
- config['match']['ignore_data_tracks'] = False
- tracks = [self._make_track('TITLE ONE', 'ID ONE', 100.0 * 1000.0),
- self._make_track('TITLE TWO', 'ID TWO', 200.0 * 1000.0)]
- data_tracks = [self._make_track('TITLE AUDIO DATA', 'ID DATA TRACK',
- 100.0 * 1000.0)]
+ config["match"]["ignore_data_tracks"] = False
+ tracks = [
+ self._make_track("TITLE ONE", "ID ONE", 100.0 * 1000.0),
+ self._make_track("TITLE TWO", "ID TWO", 200.0 * 1000.0),
+ ]
+ data_tracks = [
+ self._make_track(
+ "TITLE AUDIO DATA", "ID DATA TRACK", 100.0 * 1000.0
+ )
+ ]
release = self._make_release(tracks=tracks, data_tracks=data_tracks)
d = mb.album_info(release)
self.assertEqual(len(d.tracks), 3)
- self.assertEqual(d.tracks[0].title, 'TITLE ONE')
- self.assertEqual(d.tracks[1].title, 'TITLE TWO')
- self.assertEqual(d.tracks[2].title, 'TITLE AUDIO DATA')
+ self.assertEqual(d.tracks[0].title, "TITLE ONE")
+ self.assertEqual(d.tracks[1].title, "TITLE TWO")
+ self.assertEqual(d.tracks[2].title, "TITLE AUDIO DATA")
def test_skip_video_tracks_by_default(self):
- tracks = [self._make_track('TITLE ONE', 'ID ONE', 100.0 * 1000.0),
- self._make_track('TITLE VIDEO', 'ID VIDEO', 100.0 * 1000.0,
- False, True),
- self._make_track('TITLE TWO', 'ID TWO', 200.0 * 1000.0)]
+ tracks = [
+ self._make_track("TITLE ONE", "ID ONE", 100.0 * 1000.0),
+ self._make_track(
+ "TITLE VIDEO", "ID VIDEO", 100.0 * 1000.0, False, True
+ ),
+ self._make_track("TITLE TWO", "ID TWO", 200.0 * 1000.0),
+ ]
release = self._make_release(tracks=tracks)
d = mb.album_info(release)
self.assertEqual(len(d.tracks), 2)
- self.assertEqual(d.tracks[0].title, 'TITLE ONE')
- self.assertEqual(d.tracks[1].title, 'TITLE TWO')
+ self.assertEqual(d.tracks[0].title, "TITLE ONE")
+ self.assertEqual(d.tracks[1].title, "TITLE TWO")
def test_skip_video_data_tracks_by_default(self):
- tracks = [self._make_track('TITLE ONE', 'ID ONE', 100.0 * 1000.0),
- self._make_track('TITLE TWO', 'ID TWO', 200.0 * 1000.0)]
- data_tracks = [self._make_track('TITLE VIDEO', 'ID VIDEO',
- 100.0 * 1000.0, False, True)]
+ tracks = [
+ self._make_track("TITLE ONE", "ID ONE", 100.0 * 1000.0),
+ self._make_track("TITLE TWO", "ID TWO", 200.0 * 1000.0),
+ ]
+ data_tracks = [
+ self._make_track(
+ "TITLE VIDEO", "ID VIDEO", 100.0 * 1000.0, False, True
+ )
+ ]
release = self._make_release(tracks=tracks, data_tracks=data_tracks)
d = mb.album_info(release)
self.assertEqual(len(d.tracks), 2)
- self.assertEqual(d.tracks[0].title, 'TITLE ONE')
- self.assertEqual(d.tracks[1].title, 'TITLE TWO')
+ self.assertEqual(d.tracks[0].title, "TITLE ONE")
+ self.assertEqual(d.tracks[1].title, "TITLE TWO")
def test_no_skip_video_tracks_if_configured(self):
- config['match']['ignore_data_tracks'] = False
- config['match']['ignore_video_tracks'] = False
- tracks = [self._make_track('TITLE ONE', 'ID ONE', 100.0 * 1000.0),
- self._make_track('TITLE VIDEO', 'ID VIDEO', 100.0 * 1000.0,
- False, True),
- self._make_track('TITLE TWO', 'ID TWO', 200.0 * 1000.0)]
+ config["match"]["ignore_data_tracks"] = False
+ config["match"]["ignore_video_tracks"] = False
+ tracks = [
+ self._make_track("TITLE ONE", "ID ONE", 100.0 * 1000.0),
+ self._make_track(
+ "TITLE VIDEO", "ID VIDEO", 100.0 * 1000.0, False, True
+ ),
+ self._make_track("TITLE TWO", "ID TWO", 200.0 * 1000.0),
+ ]
release = self._make_release(tracks=tracks)
d = mb.album_info(release)
self.assertEqual(len(d.tracks), 3)
- self.assertEqual(d.tracks[0].title, 'TITLE ONE')
- self.assertEqual(d.tracks[1].title, 'TITLE VIDEO')
- self.assertEqual(d.tracks[2].title, 'TITLE TWO')
+ self.assertEqual(d.tracks[0].title, "TITLE ONE")
+ self.assertEqual(d.tracks[1].title, "TITLE VIDEO")
+ self.assertEqual(d.tracks[2].title, "TITLE TWO")
def test_no_skip_video_data_tracks_if_configured(self):
- config['match']['ignore_data_tracks'] = False
- config['match']['ignore_video_tracks'] = False
- tracks = [self._make_track('TITLE ONE', 'ID ONE', 100.0 * 1000.0),
- self._make_track('TITLE TWO', 'ID TWO', 200.0 * 1000.0)]
- data_tracks = [self._make_track('TITLE VIDEO', 'ID VIDEO',
- 100.0 * 1000.0, False, True)]
+ config["match"]["ignore_data_tracks"] = False
+ config["match"]["ignore_video_tracks"] = False
+ tracks = [
+ self._make_track("TITLE ONE", "ID ONE", 100.0 * 1000.0),
+ self._make_track("TITLE TWO", "ID TWO", 200.0 * 1000.0),
+ ]
+ data_tracks = [
+ self._make_track(
+ "TITLE VIDEO", "ID VIDEO", 100.0 * 1000.0, False, True
+ )
+ ]
release = self._make_release(tracks=tracks, data_tracks=data_tracks)
d = mb.album_info(release)
self.assertEqual(len(d.tracks), 3)
- self.assertEqual(d.tracks[0].title, 'TITLE ONE')
- self.assertEqual(d.tracks[1].title, 'TITLE TWO')
- self.assertEqual(d.tracks[2].title, 'TITLE VIDEO')
+ self.assertEqual(d.tracks[0].title, "TITLE ONE")
+ self.assertEqual(d.tracks[1].title, "TITLE TWO")
+ self.assertEqual(d.tracks[2].title, "TITLE VIDEO")
def test_track_disambiguation(self):
- tracks = [self._make_track('TITLE ONE', 'ID ONE', 100.0 * 1000.0),
- self._make_track('TITLE TWO', 'ID TWO', 200.0 * 1000.0,
- disambiguation="SECOND TRACK")]
+ tracks = [
+ self._make_track("TITLE ONE", "ID ONE", 100.0 * 1000.0),
+ self._make_track(
+ "TITLE TWO",
+ "ID TWO",
+ 200.0 * 1000.0,
+ disambiguation="SECOND TRACK",
+ ),
+ ]
release = self._make_release(tracks=tracks)
d = mb.album_info(release)
@@ -642,365 +675,407 @@ def test_parse_id_url_finds_id(self):
class ArtistFlatteningTest(_common.TestCase):
- def _credit_dict(self, suffix=''):
+ def _credit_dict(self, suffix=""):
return {
- 'artist': {
- 'name': 'NAME' + suffix,
- 'sort-name': 'SORT' + suffix,
+ "artist": {
+ "name": "NAME" + suffix,
+ "sort-name": "SORT" + suffix,
},
- 'name': 'CREDIT' + suffix,
+ "name": "CREDIT" + suffix,
}
- def _add_alias(self, credit_dict, suffix='', locale='', primary=False):
+ def _add_alias(self, credit_dict, suffix="", locale="", primary=False):
alias = {
- 'alias': 'ALIAS' + suffix,
- 'locale': locale,
- 'sort-name': 'ALIASSORT' + suffix
+ "alias": "ALIAS" + suffix,
+ "locale": locale,
+ "sort-name": "ALIASSORT" + suffix,
}
if primary:
- alias['primary'] = 'primary'
- if 'alias-list' not in credit_dict['artist']:
- credit_dict['artist']['alias-list'] = []
- credit_dict['artist']['alias-list'].append(alias)
+ alias["primary"] = "primary"
+ if "alias-list" not in credit_dict["artist"]:
+ credit_dict["artist"]["alias-list"] = []
+ credit_dict["artist"]["alias-list"].append(alias)
def test_single_artist(self):
credit = [self._credit_dict()]
a, s, c = mb._flatten_artist_credit(credit)
- self.assertEqual(a, 'NAME')
- self.assertEqual(s, 'SORT')
- self.assertEqual(c, 'CREDIT')
+ self.assertEqual(a, "NAME")
+ self.assertEqual(s, "SORT")
+ self.assertEqual(c, "CREDIT")
a, s, c = mb._multi_artist_credit(credit, include_join_phrase=False)
- self.assertEqual(a, ['NAME'])
- self.assertEqual(s, ['SORT'])
- self.assertEqual(c, ['CREDIT'])
+ self.assertEqual(a, ["NAME"])
+ self.assertEqual(s, ["SORT"])
+ self.assertEqual(c, ["CREDIT"])
def test_two_artists(self):
- credit = [self._credit_dict('a'), ' AND ', self._credit_dict('b')]
- a, s, c = mb._flatten_artist_credit(
- credit
- )
- self.assertEqual(a, 'NAMEa AND NAMEb')
- self.assertEqual(s, 'SORTa AND SORTb')
- self.assertEqual(c, 'CREDITa AND CREDITb')
+ credit = [self._credit_dict("a"), " AND ", self._credit_dict("b")]
+ a, s, c = mb._flatten_artist_credit(credit)
+ self.assertEqual(a, "NAMEa AND NAMEb")
+ self.assertEqual(s, "SORTa AND SORTb")
+ self.assertEqual(c, "CREDITa AND CREDITb")
a, s, c = mb._multi_artist_credit(credit, include_join_phrase=False)
- self.assertEqual(a, ['NAMEa', 'NAMEb'])
- self.assertEqual(s, ['SORTa', 'SORTb'])
- self.assertEqual(c, ['CREDITa', 'CREDITb'])
+ self.assertEqual(a, ["NAMEa", "NAMEb"])
+ self.assertEqual(s, ["SORTa", "SORTb"])
+ self.assertEqual(c, ["CREDITa", "CREDITb"])
def test_alias(self):
credit_dict = self._credit_dict()
- self._add_alias(credit_dict, suffix='en', locale='en', primary=True)
- self._add_alias(credit_dict, suffix='en_GB', locale='en_GB',
- primary=True)
- self._add_alias(credit_dict, suffix='fr', locale='fr')
- self._add_alias(credit_dict, suffix='fr_P', locale='fr', primary=True)
- self._add_alias(credit_dict, suffix='pt_BR', locale='pt_BR')
+ self._add_alias(credit_dict, suffix="en", locale="en", primary=True)
+ self._add_alias(
+ credit_dict, suffix="en_GB", locale="en_GB", primary=True
+ )
+ self._add_alias(credit_dict, suffix="fr", locale="fr")
+ self._add_alias(credit_dict, suffix="fr_P", locale="fr", primary=True)
+ self._add_alias(credit_dict, suffix="pt_BR", locale="pt_BR")
# test no alias
- config['import']['languages'] = ['']
+ config["import"]["languages"] = [""]
flat = mb._flatten_artist_credit([credit_dict])
- self.assertEqual(flat, ('NAME', 'SORT', 'CREDIT'))
+ self.assertEqual(flat, ("NAME", "SORT", "CREDIT"))
# test en primary
- config['import']['languages'] = ['en']
+ config["import"]["languages"] = ["en"]
flat = mb._flatten_artist_credit([credit_dict])
- self.assertEqual(flat, ('ALIASen', 'ALIASSORTen', 'CREDIT'))
+ self.assertEqual(flat, ("ALIASen", "ALIASSORTen", "CREDIT"))
# test en_GB en primary
- config['import']['languages'] = ['en_GB', 'en']
+ config["import"]["languages"] = ["en_GB", "en"]
flat = mb._flatten_artist_credit([credit_dict])
- self.assertEqual(flat, ('ALIASen_GB', 'ALIASSORTen_GB', 'CREDIT'))
+ self.assertEqual(flat, ("ALIASen_GB", "ALIASSORTen_GB", "CREDIT"))
# test en en_GB primary
- config['import']['languages'] = ['en', 'en_GB']
+ config["import"]["languages"] = ["en", "en_GB"]
flat = mb._flatten_artist_credit([credit_dict])
- self.assertEqual(flat, ('ALIASen', 'ALIASSORTen', 'CREDIT'))
+ self.assertEqual(flat, ("ALIASen", "ALIASSORTen", "CREDIT"))
# test fr primary
- config['import']['languages'] = ['fr']
+ config["import"]["languages"] = ["fr"]
flat = mb._flatten_artist_credit([credit_dict])
- self.assertEqual(flat, ('ALIASfr_P', 'ALIASSORTfr_P', 'CREDIT'))
+ self.assertEqual(flat, ("ALIASfr_P", "ALIASSORTfr_P", "CREDIT"))
# test for not matching non-primary
- config['import']['languages'] = ['pt_BR', 'fr']
+ config["import"]["languages"] = ["pt_BR", "fr"]
flat = mb._flatten_artist_credit([credit_dict])
- self.assertEqual(flat, ('ALIASfr_P', 'ALIASSORTfr_P', 'CREDIT'))
+ self.assertEqual(flat, ("ALIASfr_P", "ALIASSORTfr_P", "CREDIT"))
class MBLibraryTest(unittest.TestCase):
def test_match_track(self):
- with mock.patch('musicbrainzngs.search_recordings') as p:
+ with mock.patch("musicbrainzngs.search_recordings") as p:
p.return_value = {
- 'recording-list': [{
- 'title': 'foo',
- 'id': 'bar',
- 'length': 42,
- }],
+ "recording-list": [
+ {
+ "title": "foo",
+ "id": "bar",
+ "length": 42,
+ }
+ ],
}
- ti = list(mb.match_track('hello', 'there'))[0]
+ ti = list(mb.match_track("hello", "there"))[0]
- p.assert_called_with(artist='hello', recording='there', limit=5)
- self.assertEqual(ti.title, 'foo')
- self.assertEqual(ti.track_id, 'bar')
+ p.assert_called_with(artist="hello", recording="there", limit=5)
+ self.assertEqual(ti.title, "foo")
+ self.assertEqual(ti.track_id, "bar")
def test_match_album(self):
- mbid = 'd2a6f856-b553-40a0-ac54-a321e8e2da99'
- with mock.patch('musicbrainzngs.search_releases') as sp:
+ mbid = "d2a6f856-b553-40a0-ac54-a321e8e2da99"
+ with mock.patch("musicbrainzngs.search_releases") as sp:
sp.return_value = {
- 'release-list': [{
- 'id': mbid,
- }],
+ "release-list": [
+ {
+ "id": mbid,
+ }
+ ],
}
- with mock.patch('musicbrainzngs.get_release_by_id') as gp:
+ with mock.patch("musicbrainzngs.get_release_by_id") as gp:
gp.return_value = {
- 'release': {
- 'title': 'hi',
- 'id': mbid,
- 'status': 'status',
- 'medium-list': [{
- 'track-list': [{
- 'id': 'baz',
- 'recording': {
- 'title': 'foo',
- 'id': 'bar',
- 'length': 42,
+ "release": {
+ "title": "hi",
+ "id": mbid,
+ "status": "status",
+ "medium-list": [
+ {
+ "track-list": [
+ {
+ "id": "baz",
+ "recording": {
+ "title": "foo",
+ "id": "bar",
+ "length": 42,
+ },
+ "position": 9,
+ "number": "A1",
+ }
+ ],
+ "position": 5,
+ }
+ ],
+ "artist-credit": [
+ {
+ "artist": {
+ "name": "some-artist",
+ "id": "some-id",
},
- 'position': 9,
- 'number': 'A1',
- }],
- 'position': 5,
- }],
- 'artist-credit': [{
- 'artist': {
- 'name': 'some-artist',
- 'id': 'some-id',
- },
- }],
- 'release-group': {
- 'id': 'another-id',
- }
+ }
+ ],
+ "release-group": {
+ "id": "another-id",
+ },
}
}
- ai = list(mb.match_album('hello', 'there'))[0]
+ ai = list(mb.match_album("hello", "there"))[0]
- sp.assert_called_with(artist='hello', release='there', limit=5)
+ sp.assert_called_with(artist="hello", release="there", limit=5)
gp.assert_called_with(mbid, mock.ANY)
- self.assertEqual(ai.tracks[0].title, 'foo')
- self.assertEqual(ai.album, 'hi')
+ self.assertEqual(ai.tracks[0].title, "foo")
+ self.assertEqual(ai.album, "hi")
def test_match_track_empty(self):
- with mock.patch('musicbrainzngs.search_recordings') as p:
- til = list(mb.match_track(' ', ' '))
+ with mock.patch("musicbrainzngs.search_recordings") as p:
+ til = list(mb.match_track(" ", " "))
self.assertFalse(p.called)
self.assertEqual(til, [])
def test_match_album_empty(self):
- with mock.patch('musicbrainzngs.search_releases') as p:
- ail = list(mb.match_album(' ', ' '))
+ with mock.patch("musicbrainzngs.search_releases") as p:
+ ail = list(mb.match_album(" ", " "))
self.assertFalse(p.called)
self.assertEqual(ail, [])
def test_follow_pseudo_releases(self):
side_effect = [
{
- 'release': {
- 'title': 'pseudo',
- 'id': 'd2a6f856-b553-40a0-ac54-a321e8e2da02',
- 'status': 'Pseudo-Release',
- 'medium-list': [{
- 'track-list': [{
- 'id': 'baz',
- 'recording': {
- 'title': 'translated title',
- 'id': 'bar',
- 'length': 42,
+ "release": {
+ "title": "pseudo",
+ "id": "d2a6f856-b553-40a0-ac54-a321e8e2da02",
+ "status": "Pseudo-Release",
+ "medium-list": [
+ {
+ "track-list": [
+ {
+ "id": "baz",
+ "recording": {
+ "title": "translated title",
+ "id": "bar",
+ "length": 42,
+ },
+ "position": 9,
+ "number": "A1",
+ }
+ ],
+ "position": 5,
+ }
+ ],
+ "artist-credit": [
+ {
+ "artist": {
+ "name": "some-artist",
+ "id": "some-id",
},
- 'position': 9,
- 'number': 'A1',
- }],
- 'position': 5,
- }],
- 'artist-credit': [{
- 'artist': {
- 'name': 'some-artist',
- 'id': 'some-id',
- },
- }],
- 'release-group': {
- 'id': 'another-id',
+ }
+ ],
+ "release-group": {
+ "id": "another-id",
},
- 'release-relation-list': [
+ "release-relation-list": [
{
- 'type': 'transl-tracklisting',
- 'target': 'd2a6f856-b553-40a0-ac54-a321e8e2da01',
- 'direction': 'backward'
+ "type": "transl-tracklisting",
+ "target": "d2a6f856-b553-40a0-ac54-a321e8e2da01",
+ "direction": "backward",
}
- ]
+ ],
}
},
{
- 'release': {
- 'title': 'actual',
- 'id': 'd2a6f856-b553-40a0-ac54-a321e8e2da01',
- 'status': 'Official',
- 'medium-list': [{
- 'track-list': [{
- 'id': 'baz',
- 'recording': {
- 'title': 'original title',
- 'id': 'bar',
- 'length': 42,
- },
- 'position': 9,
- 'number': 'A1',
- }],
- 'position': 5,
- }],
- 'artist-credit': [{
- 'artist': {
- 'name': 'some-artist',
- 'id': 'some-id',
+ "release": {
+ "title": "actual",
+ "id": "d2a6f856-b553-40a0-ac54-a321e8e2da01",
+ "status": "Official",
+ "medium-list": [
+ {
+ "track-list": [
+ {
+ "id": "baz",
+ "recording": {
+ "title": "original title",
+ "id": "bar",
+ "length": 42,
+ },
+ "position": 9,
+ "number": "A1",
+ }
+ ],
+ "position": 5,
+ }
+ ],
+ "artist-credit": [
+ {
+ "artist": {
+ "name": "some-artist",
+ "id": "some-id",
},
- }],
- 'release-group': {
- 'id': 'another-id',
- },
- 'country': 'COUNTRY',
- }
+ }
+ ],
+ "release-group": {
+ "id": "another-id",
+ },
+ "country": "COUNTRY",
}
+ },
]
- with mock.patch('musicbrainzngs.get_release_by_id') as gp:
+ with mock.patch("musicbrainzngs.get_release_by_id") as gp:
gp.side_effect = side_effect
- album = mb.album_for_id('d2a6f856-b553-40a0-ac54-a321e8e2da02')
- self.assertEqual(album.country, 'COUNTRY')
+ album = mb.album_for_id("d2a6f856-b553-40a0-ac54-a321e8e2da02")
+ self.assertEqual(album.country, "COUNTRY")
def test_pseudo_releases_with_empty_links(self):
- side_effect = [{
- 'release': {
- 'title': 'pseudo',
- 'id': 'd2a6f856-b553-40a0-ac54-a321e8e2da02',
- 'status': 'Pseudo-Release',
- 'medium-list': [{
- 'track-list': [{
- 'id': 'baz',
- 'recording': {
- 'title': 'translated title',
- 'id': 'bar',
- 'length': 42,
- },
- 'position': 9,
- 'number': 'A1',
- }],
- 'position': 5,
- }],
- 'artist-credit': [{
- 'artist': {
- 'name': 'some-artist',
- 'id': 'some-id',
+ side_effect = [
+ {
+ "release": {
+ "title": "pseudo",
+ "id": "d2a6f856-b553-40a0-ac54-a321e8e2da02",
+ "status": "Pseudo-Release",
+ "medium-list": [
+ {
+ "track-list": [
+ {
+ "id": "baz",
+ "recording": {
+ "title": "translated title",
+ "id": "bar",
+ "length": 42,
+ },
+ "position": 9,
+ "number": "A1",
+ }
+ ],
+ "position": 5,
+ }
+ ],
+ "artist-credit": [
+ {
+ "artist": {
+ "name": "some-artist",
+ "id": "some-id",
},
- }],
- 'release-group': {
- 'id': 'another-id',
- },
- 'release-relation-list': []
- }
- },
+ }
+ ],
+ "release-group": {
+ "id": "another-id",
+ },
+ "release-relation-list": [],
+ }
+ },
]
- with mock.patch('musicbrainzngs.get_release_by_id') as gp:
+ with mock.patch("musicbrainzngs.get_release_by_id") as gp:
gp.side_effect = side_effect
- album = mb.album_for_id('d2a6f856-b553-40a0-ac54-a321e8e2da02')
+ album = mb.album_for_id("d2a6f856-b553-40a0-ac54-a321e8e2da02")
self.assertEqual(album.country, None)
def test_pseudo_releases_without_links(self):
- side_effect = [{
- 'release': {
- 'title': 'pseudo',
- 'id': 'd2a6f856-b553-40a0-ac54-a321e8e2da02',
- 'status': 'Pseudo-Release',
- 'medium-list': [{
- 'track-list': [{
- 'id': 'baz',
- 'recording': {
- 'title': 'translated title',
- 'id': 'bar',
- 'length': 42,
- },
- 'position': 9,
- 'number': 'A1',
- }],
- 'position': 5,
- }],
- 'artist-credit': [{
- 'artist': {
- 'name': 'some-artist',
- 'id': 'some-id',
+ side_effect = [
+ {
+ "release": {
+ "title": "pseudo",
+ "id": "d2a6f856-b553-40a0-ac54-a321e8e2da02",
+ "status": "Pseudo-Release",
+ "medium-list": [
+ {
+ "track-list": [
+ {
+ "id": "baz",
+ "recording": {
+ "title": "translated title",
+ "id": "bar",
+ "length": 42,
+ },
+ "position": 9,
+ "number": "A1",
+ }
+ ],
+ "position": 5,
+ }
+ ],
+ "artist-credit": [
+ {
+ "artist": {
+ "name": "some-artist",
+ "id": "some-id",
},
- }],
- 'release-group': {
- 'id': 'another-id',
- },
- }
- },
+ }
+ ],
+ "release-group": {
+ "id": "another-id",
+ },
+ }
+ },
]
- with mock.patch('musicbrainzngs.get_release_by_id') as gp:
+ with mock.patch("musicbrainzngs.get_release_by_id") as gp:
gp.side_effect = side_effect
- album = mb.album_for_id('d2a6f856-b553-40a0-ac54-a321e8e2da02')
+ album = mb.album_for_id("d2a6f856-b553-40a0-ac54-a321e8e2da02")
self.assertEqual(album.country, None)
def test_pseudo_releases_with_unsupported_links(self):
side_effect = [
{
- 'release': {
- 'title': 'pseudo',
- 'id': 'd2a6f856-b553-40a0-ac54-a321e8e2da02',
- 'status': 'Pseudo-Release',
- 'medium-list': [{
- 'track-list': [{
- 'id': 'baz',
- 'recording': {
- 'title': 'translated title',
- 'id': 'bar',
- 'length': 42,
+ "release": {
+ "title": "pseudo",
+ "id": "d2a6f856-b553-40a0-ac54-a321e8e2da02",
+ "status": "Pseudo-Release",
+ "medium-list": [
+ {
+ "track-list": [
+ {
+ "id": "baz",
+ "recording": {
+ "title": "translated title",
+ "id": "bar",
+ "length": 42,
+ },
+ "position": 9,
+ "number": "A1",
+ }
+ ],
+ "position": 5,
+ }
+ ],
+ "artist-credit": [
+ {
+ "artist": {
+ "name": "some-artist",
+ "id": "some-id",
},
- 'position': 9,
- 'number': 'A1',
- }],
- 'position': 5,
- }],
- 'artist-credit': [{
- 'artist': {
- 'name': 'some-artist',
- 'id': 'some-id',
- },
- }],
- 'release-group': {
- 'id': 'another-id',
+ }
+ ],
+ "release-group": {
+ "id": "another-id",
},
- 'release-relation-list': [
+ "release-relation-list": [
{
- 'type': 'remaster',
- 'target': 'd2a6f856-b553-40a0-ac54-a321e8e2da01',
- 'direction': 'backward'
+ "type": "remaster",
+ "target": "d2a6f856-b553-40a0-ac54-a321e8e2da01",
+ "direction": "backward",
}
- ]
+ ],
}
},
]
- with mock.patch('musicbrainzngs.get_release_by_id') as gp:
+ with mock.patch("musicbrainzngs.get_release_by_id") as gp:
gp.side_effect = side_effect
- album = mb.album_for_id('d2a6f856-b553-40a0-ac54-a321e8e2da02')
+ album = mb.album_for_id("d2a6f856-b553-40a0-ac54-a321e8e2da02")
self.assertEqual(album.country, None)
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/test_metasync.py b/test/test_metasync.py
index 99c2f5a703..18493adb43 100644
--- a/test/test_metasync.py
+++ b/test/test_metasync.py
@@ -16,17 +16,17 @@
import os
import platform
import time
-from datetime import datetime
-from beets.library import Item
-from beets.util import py3_path
import unittest
-
+from datetime import datetime
from test import _common
from test.helper import TestHelper
+from beets.library import Item
+from beets.util import py3_path
+
def _parsetime(s):
- return time.mktime(datetime.strptime(s, '%Y-%m-%d %H:%M:%S').timetuple())
+ return time.mktime(datetime.strptime(s, "%Y-%m-%d %H:%M:%S").timetuple())
def _is_windows():
@@ -34,48 +34,52 @@ def _is_windows():
class MetaSyncTest(_common.TestCase, TestHelper):
- itunes_library_unix = os.path.join(_common.RSRC,
- b'itunes_library_unix.xml')
- itunes_library_windows = os.path.join(_common.RSRC,
- b'itunes_library_windows.xml')
+ itunes_library_unix = os.path.join(_common.RSRC, b"itunes_library_unix.xml")
+ itunes_library_windows = os.path.join(
+ _common.RSRC, b"itunes_library_windows.xml"
+ )
def setUp(self):
self.setup_beets()
- self.load_plugins('metasync')
+ self.load_plugins("metasync")
- self.config['metasync']['source'] = 'itunes'
+ self.config["metasync"]["source"] = "itunes"
if _is_windows():
- self.config['metasync']['itunes']['library'] = \
- py3_path(self.itunes_library_windows)
+ self.config["metasync"]["itunes"]["library"] = py3_path(
+ self.itunes_library_windows
+ )
else:
- self.config['metasync']['itunes']['library'] = \
- py3_path(self.itunes_library_unix)
+ self.config["metasync"]["itunes"]["library"] = py3_path(
+ self.itunes_library_unix
+ )
self._set_up_data()
def _set_up_data(self):
items = [_common.item() for _ in range(2)]
- items[0].title = 'Tessellate'
- items[0].artist = 'alt-J'
- items[0].albumartist = 'alt-J'
- items[0].album = 'An Awesome Wave'
+ items[0].title = "Tessellate"
+ items[0].artist = "alt-J"
+ items[0].albumartist = "alt-J"
+ items[0].album = "An Awesome Wave"
items[0].itunes_rating = 60
- items[1].title = 'Breezeblocks'
- items[1].artist = 'alt-J'
- items[1].albumartist = 'alt-J'
- items[1].album = 'An Awesome Wave'
+ items[1].title = "Breezeblocks"
+ items[1].artist = "alt-J"
+ items[1].albumartist = "alt-J"
+ items[1].album = "An Awesome Wave"
if _is_windows():
- items[0].path = \
- 'G:\\Music\\Alt-J\\An Awesome Wave\\03 Tessellate.mp3'
- items[1].path = \
- 'G:\\Music\\Alt-J\\An Awesome Wave\\04 Breezeblocks.mp3'
+ items[
+ 0
+ ].path = "G:\\Music\\Alt-J\\An Awesome Wave\\03 Tessellate.mp3"
+ items[
+ 1
+ ].path = "G:\\Music\\Alt-J\\An Awesome Wave\\04 Breezeblocks.mp3"
else:
- items[0].path = '/Music/Alt-J/An Awesome Wave/03 Tessellate.mp3'
- items[1].path = '/Music/Alt-J/An Awesome Wave/04 Breezeblocks.mp3'
+ items[0].path = "/Music/Alt-J/An Awesome Wave/03 Tessellate.mp3"
+ items[1].path = "/Music/Alt-J/An Awesome Wave/04 Breezeblocks.mp3"
for item in items:
self.lib.add(item)
@@ -86,46 +90,54 @@ def tearDown(self):
def test_load_item_types(self):
# This test also verifies that the MetaSources have loaded correctly
- self.assertIn('amarok_score', Item._types)
- self.assertIn('itunes_rating', Item._types)
+ self.assertIn("amarok_score", Item._types)
+ self.assertIn("itunes_rating", Item._types)
def test_pretend_sync_from_itunes(self):
- out = self.run_with_output('metasync', '-p')
-
- self.assertIn('itunes_rating: 60 -> 80', out)
- self.assertIn('itunes_rating: 100', out)
- self.assertIn('itunes_playcount: 31', out)
- self.assertIn('itunes_skipcount: 3', out)
- self.assertIn('itunes_lastplayed: 2015-05-04 12:20:51', out)
- self.assertIn('itunes_lastskipped: 2015-02-05 15:41:04', out)
- self.assertIn('itunes_dateadded: 2014-04-24 09:28:38', out)
+ out = self.run_with_output("metasync", "-p")
+
+ self.assertIn("itunes_rating: 60 -> 80", out)
+ self.assertIn("itunes_rating: 100", out)
+ self.assertIn("itunes_playcount: 31", out)
+ self.assertIn("itunes_skipcount: 3", out)
+ self.assertIn("itunes_lastplayed: 2015-05-04 12:20:51", out)
+ self.assertIn("itunes_lastskipped: 2015-02-05 15:41:04", out)
+ self.assertIn("itunes_dateadded: 2014-04-24 09:28:38", out)
self.assertEqual(self.lib.items()[0].itunes_rating, 60)
def test_sync_from_itunes(self):
- self.run_command('metasync')
+ self.run_command("metasync")
self.assertEqual(self.lib.items()[0].itunes_rating, 80)
self.assertEqual(self.lib.items()[0].itunes_playcount, 0)
self.assertEqual(self.lib.items()[0].itunes_skipcount, 3)
- self.assertFalse(hasattr(self.lib.items()[0], 'itunes_lastplayed'))
- self.assertEqual(self.lib.items()[0].itunes_lastskipped,
- _parsetime('2015-02-05 15:41:04'))
- self.assertEqual(self.lib.items()[0].itunes_dateadded,
- _parsetime('2014-04-24 09:28:38'))
+ self.assertFalse(hasattr(self.lib.items()[0], "itunes_lastplayed"))
+ self.assertEqual(
+ self.lib.items()[0].itunes_lastskipped,
+ _parsetime("2015-02-05 15:41:04"),
+ )
+ self.assertEqual(
+ self.lib.items()[0].itunes_dateadded,
+ _parsetime("2014-04-24 09:28:38"),
+ )
self.assertEqual(self.lib.items()[1].itunes_rating, 100)
self.assertEqual(self.lib.items()[1].itunes_playcount, 31)
self.assertEqual(self.lib.items()[1].itunes_skipcount, 0)
- self.assertEqual(self.lib.items()[1].itunes_lastplayed,
- _parsetime('2015-05-04 12:20:51'))
- self.assertEqual(self.lib.items()[1].itunes_dateadded,
- _parsetime('2014-04-24 09:28:38'))
- self.assertFalse(hasattr(self.lib.items()[1], 'itunes_lastskipped'))
+ self.assertEqual(
+ self.lib.items()[1].itunes_lastplayed,
+ _parsetime("2015-05-04 12:20:51"),
+ )
+ self.assertEqual(
+ self.lib.items()[1].itunes_dateadded,
+ _parsetime("2014-04-24 09:28:38"),
+ )
+ self.assertFalse(hasattr(self.lib.items()[1], "itunes_lastskipped"))
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/test_pipeline.py b/test/test_pipeline.py
index cc93d2442b..f5d4cebf33 100644
--- a/test/test_pipeline.py
+++ b/test/test_pipeline.py
@@ -97,9 +97,9 @@ def test_pull_chain(self):
class ParallelStageTest(unittest.TestCase):
def setUp(self):
self.l = []
- self.pl = pipeline.Pipeline((
- _produce(), (_work(), _work()), _consume(self.l)
- ))
+ self.pl = pipeline.Pipeline(
+ (_produce(), (_work(), _work()), _consume(self.l))
+ )
def test_run_sequential(self):
self.pl.run_sequential()
@@ -118,8 +118,7 @@ def test_pull(self):
class ExceptionTest(unittest.TestCase):
def setUp(self):
self.l = []
- self.pl = pipeline.Pipeline((_produce(), _exc_work(),
- _consume(self.l)))
+ self.pl = pipeline.Pipeline((_produce(), _exc_work(), _consume(self.l)))
def test_run_sequential(self):
self.assertRaises(ExceptionFixture, self.pl.run_sequential)
@@ -138,9 +137,9 @@ def test_pull(self):
class ParallelExceptionTest(unittest.TestCase):
def setUp(self):
self.l = []
- self.pl = pipeline.Pipeline((
- _produce(), (_exc_work(), _exc_work()), _consume(self.l)
- ))
+ self.pl = pipeline.Pipeline(
+ (_produce(), (_exc_work(), _exc_work()), _consume(self.l))
+ )
def test_run_parallel(self):
self.assertRaises(ExceptionFixture, self.pl.run_parallel)
@@ -163,9 +162,9 @@ def test_constrained_exception(self):
def test_constrained_parallel(self):
l = []
- pl = pipeline.Pipeline((
- _produce(1000), (_work(), _work()), _consume(l)
- ))
+ pl = pipeline.Pipeline(
+ (_produce(1000), (_work(), _work()), _consume(l))
+ )
pl.run_parallel(1)
self.assertEqual(set(l), {i * 2 for i in range(1000)})
@@ -173,8 +172,7 @@ def test_constrained_parallel(self):
class BubbleTest(unittest.TestCase):
def setUp(self):
self.l = []
- self.pl = pipeline.Pipeline((_produce(), _bub_work(),
- _consume(self.l)))
+ self.pl = pipeline.Pipeline((_produce(), _bub_work(), _consume(self.l)))
def test_run_sequential(self):
self.pl.run_sequential()
@@ -192,9 +190,9 @@ def test_pull(self):
class MultiMessageTest(unittest.TestCase):
def setUp(self):
self.l = []
- self.pl = pipeline.Pipeline((
- _produce(), _multi_work(), _consume(self.l)
- ))
+ self.pl = pipeline.Pipeline(
+ (_produce(), _multi_work(), _consume(self.l))
+ )
def test_run_sequential(self):
self.pl.run_sequential()
@@ -210,16 +208,12 @@ def test_pull(self):
class StageDecoratorTest(unittest.TestCase):
-
def test_stage_decorator(self):
@pipeline.stage
def add(n, i):
return i + n
- pl = pipeline.Pipeline([
- iter([1, 2, 3]),
- add(2)
- ])
+ pl = pipeline.Pipeline([iter([1, 2, 3]), add(2)])
self.assertEqual(list(pl.pull()), [3, 4, 5])
def test_mutator_stage_decorator(self):
@@ -227,16 +221,20 @@ def test_mutator_stage_decorator(self):
def setkey(key, item):
item[key] = True
- pl = pipeline.Pipeline([
- iter([{'x': False}, {'a': False}]),
- setkey('x'),
- ])
- self.assertEqual(list(pl.pull()),
- [{'x': True}, {'a': False, 'x': True}])
+ pl = pipeline.Pipeline(
+ [
+ iter([{"x": False}, {"a": False}]),
+ setkey("x"),
+ ]
+ )
+ self.assertEqual(
+ list(pl.pull()), [{"x": True}, {"a": False, "x": True}]
+ )
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/test_plugins.py b/test/test_plugins.py
index d09929dd59..320742a189 100644
--- a/test/test_plugins.py
+++ b/test/test_plugins.py
@@ -13,41 +13,48 @@
# included in all copies or substantial portions of the Software.
+import itertools
import os
-from unittest.mock import patch, Mock, ANY
import shutil
-import itertools
import unittest
+from test import helper
+from test._common import RSRC
+from test.test_importer import AutotagStub, ImportHelper
+from test.test_ui_importer import TerminalImportSessionSetup
+from unittest.mock import ANY, Mock, patch
-from beets.importer import SingletonImportTask, SentinelImportTask, \
- ArchiveImportTask, action
-from beets import plugins, config, ui
-from beets.library import Item
-from beets.dbcore import types
from mediafile import MediaFile
-from beets.util import displayable_path, bytestring_path, syspath
-from beets.plugins import MetadataSourcePlugin
-from beets.util.id_extractors import spotify_id_regex, deezer_id_regex, \
- beatport_id_regex
-from test.test_importer import ImportHelper, AutotagStub
-from test.test_ui_importer import TerminalImportSessionSetup
-from test._common import RSRC
-from test import helper
+from beets import config, plugins, ui
+from beets.dbcore import types
+from beets.importer import (
+ ArchiveImportTask,
+ SentinelImportTask,
+ SingletonImportTask,
+ action,
+)
+from beets.library import Item
+from beets.plugins import MetadataSourcePlugin
+from beets.util import bytestring_path, displayable_path, syspath
+from beets.util.id_extractors import (
+ beatport_id_regex,
+ deezer_id_regex,
+ spotify_id_regex,
+)
class TestHelper(helper.TestHelper):
-
def setup_plugin_loader(self):
# FIXME the mocking code is horrific, but this is the lowest and
# earliest level of the plugin mechanism we can hook into.
self.load_plugins()
- self._plugin_loader_patch = patch('beets.plugins.load_plugins')
+ self._plugin_loader_patch = patch("beets.plugins.load_plugins")
self._plugin_classes = set()
load_plugins = self._plugin_loader_patch.start()
def myload(names=()):
plugins._classes.update(self._plugin_classes)
+
load_plugins.side_effect = myload
self.setup_beets()
@@ -60,7 +67,6 @@ def register_plugin(self, plugin_class):
class ItemTypesTest(unittest.TestCase, TestHelper):
-
def setUp(self):
self.setup_plugin_loader()
@@ -70,37 +76,37 @@ def tearDown(self):
def test_flex_field_type(self):
class RatingPlugin(plugins.BeetsPlugin):
- item_types = {'rating': types.Float()}
+ item_types = {"rating": types.Float()}
self.register_plugin(RatingPlugin)
- self.config['plugins'] = 'rating'
+ self.config["plugins"] = "rating"
- item = Item(path='apath', artist='aaa')
+ item = Item(path="apath", artist="aaa")
item.add(self.lib)
# Do not match unset values
- out = self.run_with_output('ls', 'rating:1..3')
- self.assertNotIn('aaa', out)
+ out = self.run_with_output("ls", "rating:1..3")
+ self.assertNotIn("aaa", out)
- self.run_command('modify', 'rating=2', '--yes')
+ self.run_command("modify", "rating=2", "--yes")
# Match in range
- out = self.run_with_output('ls', 'rating:1..3')
- self.assertIn('aaa', out)
+ out = self.run_with_output("ls", "rating:1..3")
+ self.assertIn("aaa", out)
# Don't match out of range
- out = self.run_with_output('ls', 'rating:3..5')
- self.assertNotIn('aaa', out)
+ out = self.run_with_output("ls", "rating:3..5")
+ self.assertNotIn("aaa", out)
class ItemWriteTest(unittest.TestCase, TestHelper):
-
def setUp(self):
self.setup_plugin_loader()
self.setup_beets()
class EventListenerPlugin(plugins.BeetsPlugin):
pass
+
self.event_listener_plugin = EventListenerPlugin()
self.register_plugin(EventListenerPlugin)
@@ -109,25 +115,23 @@ def tearDown(self):
self.teardown_beets()
def test_change_tags(self):
-
def on_write(item=None, path=None, tags=None):
- if tags['artist'] == 'XXX':
- tags['artist'] = 'YYY'
+ if tags["artist"] == "XXX":
+ tags["artist"] = "YYY"
- self.register_listener('write', on_write)
+ self.register_listener("write", on_write)
- item = self.add_item_fixture(artist='XXX')
+ item = self.add_item_fixture(artist="XXX")
item.write()
mediafile = MediaFile(syspath(item.path))
- self.assertEqual(mediafile.artist, 'YYY')
+ self.assertEqual(mediafile.artist, "YYY")
def register_listener(self, event, func):
self.event_listener_plugin.register_listener(event, func)
class ItemTypeConflictTest(unittest.TestCase, TestHelper):
-
def setUp(self):
self.setup_plugin_loader()
self.setup_beets()
@@ -138,25 +142,23 @@ def tearDown(self):
def test_mismatch(self):
class EventListenerPlugin(plugins.BeetsPlugin):
- item_types = {'duplicate': types.INTEGER}
+ item_types = {"duplicate": types.INTEGER}
class AdventListenerPlugin(plugins.BeetsPlugin):
- item_types = {'duplicate': types.FLOAT}
+ item_types = {"duplicate": types.FLOAT}
self.event_listener_plugin = EventListenerPlugin
self.advent_listener_plugin = AdventListenerPlugin
self.register_plugin(EventListenerPlugin)
self.register_plugin(AdventListenerPlugin)
- self.assertRaises(plugins.PluginConflictException,
- plugins.types, Item
- )
+ self.assertRaises(plugins.PluginConflictException, plugins.types, Item)
def test_match(self):
class EventListenerPlugin(plugins.BeetsPlugin):
- item_types = {'duplicate': types.INTEGER}
+ item_types = {"duplicate": types.INTEGER}
class AdventListenerPlugin(plugins.BeetsPlugin):
- item_types = {'duplicate': types.INTEGER}
+ item_types = {"duplicate": types.INTEGER}
self.event_listener_plugin = EventListenerPlugin
self.advent_listener_plugin = AdventListenerPlugin
@@ -166,12 +168,11 @@ class AdventListenerPlugin(plugins.BeetsPlugin):
class EventsTest(unittest.TestCase, ImportHelper, TestHelper):
-
def setUp(self):
self.setup_plugin_loader()
self.setup_beets()
self.__create_import_dir(2)
- config['import']['pretend'] = True
+ config["import"]["pretend"] = True
def tearDown(self):
self.teardown_plugin_loader()
@@ -179,7 +180,7 @@ def tearDown(self):
def __copy_file(self, dest_path, metadata):
# Copy files
- resource_path = os.path.join(RSRC, b'full.mp3')
+ resource_path = os.path.join(RSRC, b"full.mp3")
shutil.copy(syspath(resource_path), syspath(dest_path))
medium = MediaFile(dest_path)
# Set metadata
@@ -188,26 +189,26 @@ def __copy_file(self, dest_path, metadata):
medium.save()
def __create_import_dir(self, count):
- self.import_dir = os.path.join(self.temp_dir, b'testsrcdir')
+ self.import_dir = os.path.join(self.temp_dir, b"testsrcdir")
if os.path.isdir(syspath(self.import_dir)):
shutil.rmtree(syspath(self.import_dir))
- self.album_path = os.path.join(self.import_dir, b'album')
+ self.album_path = os.path.join(self.import_dir, b"album")
os.makedirs(self.album_path)
metadata = {
- 'artist': 'Tag Artist',
- 'album': 'Tag Album',
- 'albumartist': None,
- 'mb_trackid': None,
- 'mb_albumid': None,
- 'comp': None
+ "artist": "Tag Artist",
+ "album": "Tag Album",
+ "albumartist": None,
+ "mb_trackid": None,
+ "mb_albumid": None,
+ "comp": None,
}
self.file_paths = []
for i in range(count):
- metadata['track'] = i + 1
- metadata['title'] = 'Tag Title Album %d' % (i + 1)
- track_file = bytestring_path('%02d - track.mp3' % (i + 1))
+ metadata["track"] = i + 1
+ metadata["title"] = "Tag Title Album %d" % (i + 1)
+ track_file = bytestring_path("%02d - track.mp3" % (i + 1))
dest_path = os.path.join(self.album_path, track_file)
self.__copy_file(dest_path, metadata)
self.file_paths.append(dest_path)
@@ -223,29 +224,35 @@ def test_import_task_created(self):
# Exactly one event should have been imported (for the album).
# Sentinels do not get emitted.
- self.assertEqual(logs.count('Sending event: import_task_created'), 1)
+ self.assertEqual(logs.count("Sending event: import_task_created"), 1)
- logs = [line for line in logs if not line.startswith(
- 'Sending event:')]
- self.assertEqual(logs, [
- 'Album: {}'.format(displayable_path(
- os.path.join(self.import_dir, b'album'))),
- ' {}'.format(displayable_path(self.file_paths[0])),
- ' {}'.format(displayable_path(self.file_paths[1])),
- ])
+ logs = [line for line in logs if not line.startswith("Sending event:")]
+ self.assertEqual(
+ logs,
+ [
+ "Album: {}".format(
+ displayable_path(os.path.join(self.import_dir, b"album"))
+ ),
+ " {}".format(displayable_path(self.file_paths[0])),
+ " {}".format(displayable_path(self.file_paths[1])),
+ ],
+ )
def test_import_task_created_with_plugin(self):
class ToSingletonPlugin(plugins.BeetsPlugin):
def __init__(self):
super().__init__()
- self.register_listener('import_task_created',
- self.import_task_created_event)
+ self.register_listener(
+ "import_task_created", self.import_task_created_event
+ )
def import_task_created_event(self, session, task):
- if isinstance(task, SingletonImportTask) \
- or isinstance(task, SentinelImportTask)\
- or isinstance(task, ArchiveImportTask):
+ if (
+ isinstance(task, SingletonImportTask)
+ or isinstance(task, SentinelImportTask)
+ or isinstance(task, ArchiveImportTask)
+ ):
return task
new_tasks = []
@@ -267,27 +274,28 @@ def import_task_created_event(self, session, task):
# Exactly one event should have been imported (for the album).
# Sentinels do not get emitted.
- self.assertEqual(logs.count('Sending event: import_task_created'), 1)
+ self.assertEqual(logs.count("Sending event: import_task_created"), 1)
- logs = [line for line in logs if not line.startswith(
- 'Sending event:')]
- self.assertEqual(logs, [
- 'Singleton: {}'.format(displayable_path(self.file_paths[0])),
- 'Singleton: {}'.format(displayable_path(self.file_paths[1])),
- ])
+ logs = [line for line in logs if not line.startswith("Sending event:")]
+ self.assertEqual(
+ logs,
+ [
+ "Singleton: {}".format(displayable_path(self.file_paths[0])),
+ "Singleton: {}".format(displayable_path(self.file_paths[1])),
+ ],
+ )
class HelpersTest(unittest.TestCase):
-
def test_sanitize_choices(self):
self.assertEqual(
- plugins.sanitize_choices(['A', 'Z'], ('A', 'B')), ['A'])
- self.assertEqual(
- plugins.sanitize_choices(['A', 'A'], ('A')), ['A'])
+ plugins.sanitize_choices(["A", "Z"], ("A", "B")), ["A"]
+ )
+ self.assertEqual(plugins.sanitize_choices(["A", "A"], ("A")), ["A"])
self.assertEqual(
- plugins.sanitize_choices(['D', '*', 'A'],
- ('A', 'B', 'C', 'D')),
- ['D', 'B', 'C', 'A'])
+ plugins.sanitize_choices(["D", "*", "A"], ("A", "B", "C", "D")),
+ ["D", "B", "C", "A"],
+ )
class ListenersTest(unittest.TestCase, TestHelper):
@@ -299,56 +307,57 @@ def tearDown(self):
self.teardown_beets()
def test_register(self):
-
class DummyPlugin(plugins.BeetsPlugin):
def __init__(self):
super().__init__()
- self.register_listener('cli_exit', self.dummy)
- self.register_listener('cli_exit', self.dummy)
+ self.register_listener("cli_exit", self.dummy)
+ self.register_listener("cli_exit", self.dummy)
def dummy(self):
pass
d = DummyPlugin()
- self.assertEqual(DummyPlugin._raw_listeners['cli_exit'], [d.dummy])
+ self.assertEqual(DummyPlugin._raw_listeners["cli_exit"], [d.dummy])
d2 = DummyPlugin()
- self.assertEqual(DummyPlugin._raw_listeners['cli_exit'],
- [d.dummy, d2.dummy])
+ self.assertEqual(
+ DummyPlugin._raw_listeners["cli_exit"], [d.dummy, d2.dummy]
+ )
- d.register_listener('cli_exit', d2.dummy)
- self.assertEqual(DummyPlugin._raw_listeners['cli_exit'],
- [d.dummy, d2.dummy])
+ d.register_listener("cli_exit", d2.dummy)
+ self.assertEqual(
+ DummyPlugin._raw_listeners["cli_exit"], [d.dummy, d2.dummy]
+ )
- @patch('beets.plugins.find_plugins')
- @patch('inspect.getfullargspec')
+ @patch("beets.plugins.find_plugins")
+ @patch("inspect.getfullargspec")
def test_events_called(self, mock_gfa, mock_find_plugins):
mock_gfa.return_value = Mock(
args=(),
- varargs='args',
- varkw='kwargs',
+ varargs="args",
+ varkw="kwargs",
)
class DummyPlugin(plugins.BeetsPlugin):
def __init__(self):
super().__init__()
- self.foo = Mock(__name__='foo')
- self.register_listener('event_foo', self.foo)
- self.bar = Mock(__name__='bar')
- self.register_listener('event_bar', self.bar)
+ self.foo = Mock(__name__="foo")
+ self.register_listener("event_foo", self.foo)
+ self.bar = Mock(__name__="bar")
+ self.register_listener("event_bar", self.bar)
d = DummyPlugin()
- mock_find_plugins.return_value = d,
+ mock_find_plugins.return_value = (d,)
- plugins.send('event')
+ plugins.send("event")
d.foo.assert_has_calls([])
d.bar.assert_has_calls([])
- plugins.send('event_foo', var="tagada")
+ plugins.send("event_foo", var="tagada")
d.foo.assert_called_once_with(var="tagada")
d.bar.assert_has_calls([])
- @patch('beets.plugins.find_plugins')
+ @patch("beets.plugins.find_plugins")
def test_listener_params(self, mock_find_plugins):
test = self
@@ -357,10 +366,10 @@ def __init__(self):
super().__init__()
for i in itertools.count(1):
try:
- meth = getattr(self, f'dummy{i}')
+ meth = getattr(self, f"dummy{i}")
except AttributeError:
break
- self.register_listener(f'event{i}', meth)
+ self.register_listener(f"event{i}", meth)
def dummy1(self, foo):
test.assertEqual(foo, 5)
@@ -396,27 +405,28 @@ def dummy9(self, **kwargs):
test.assertEqual(kwargs, {"foo": 5})
d = DummyPlugin()
- mock_find_plugins.return_value = d,
+ mock_find_plugins.return_value = (d,)
- plugins.send('event1', foo=5)
- plugins.send('event2', foo=5)
- plugins.send('event3', foo=5)
- plugins.send('event4', foo=5)
+ plugins.send("event1", foo=5)
+ plugins.send("event2", foo=5)
+ plugins.send("event3", foo=5)
+ plugins.send("event4", foo=5)
with self.assertRaises(TypeError):
- plugins.send('event5', foo=5)
+ plugins.send("event5", foo=5)
- plugins.send('event6', foo=5)
- plugins.send('event7', foo=5)
+ plugins.send("event6", foo=5)
+ plugins.send("event7", foo=5)
with self.assertRaises(TypeError):
- plugins.send('event8', foo=5)
+ plugins.send("event8", foo=5)
- plugins.send('event9', foo=5)
+ plugins.send("event9", foo=5)
-class PromptChoicesTest(TerminalImportSessionSetup, unittest.TestCase,
- ImportHelper, TestHelper):
+class PromptChoicesTest(
+ TerminalImportSessionSetup, unittest.TestCase, ImportHelper, TestHelper
+):
def setUp(self):
self.setup_plugin_loader()
self.setup_beets()
@@ -424,8 +434,9 @@ def setUp(self):
self._setup_import_session()
self.matcher = AutotagStub().install()
# keep track of ui.input_option() calls
- self.input_options_patcher = patch('beets.ui.input_options',
- side_effect=ui.input_options)
+ self.input_options_patcher = patch(
+ "beets.ui.input_options", side_effect=ui.input_options
+ )
self.mock_input_options = self.input_options_patcher.start()
def tearDown(self):
@@ -436,174 +447,227 @@ def tearDown(self):
def test_plugin_choices_in_ui_input_options_album(self):
"""Test the presence of plugin choices on the prompt (album)."""
+
class DummyPlugin(plugins.BeetsPlugin):
def __init__(self):
super().__init__()
- self.register_listener('before_choose_candidate',
- self.return_choices)
+ self.register_listener(
+ "before_choose_candidate", self.return_choices
+ )
def return_choices(self, session, task):
- return [ui.commands.PromptChoice('f', 'Foo', None),
- ui.commands.PromptChoice('r', 'baR', None)]
+ return [
+ ui.commands.PromptChoice("f", "Foo", None),
+ ui.commands.PromptChoice("r", "baR", None),
+ ]
self.register_plugin(DummyPlugin)
# Default options + extra choices by the plugin ('Foo', 'Bar')
- opts = ('Apply', 'More candidates', 'Skip', 'Use as-is',
- 'as Tracks', 'Group albums', 'Enter search',
- 'enter Id', 'aBort') + ('Foo', 'baR')
+ opts = (
+ "Apply",
+ "More candidates",
+ "Skip",
+ "Use as-is",
+ "as Tracks",
+ "Group albums",
+ "Enter search",
+ "enter Id",
+ "aBort",
+ ) + ("Foo", "baR")
self.importer.add_choice(action.SKIP)
self.importer.run()
- self.mock_input_options.assert_called_once_with(opts, default='a',
- require=ANY)
+ self.mock_input_options.assert_called_once_with(
+ opts, default="a", require=ANY
+ )
def test_plugin_choices_in_ui_input_options_singleton(self):
"""Test the presence of plugin choices on the prompt (singleton)."""
+
class DummyPlugin(plugins.BeetsPlugin):
def __init__(self):
super().__init__()
- self.register_listener('before_choose_candidate',
- self.return_choices)
+ self.register_listener(
+ "before_choose_candidate", self.return_choices
+ )
def return_choices(self, session, task):
- return [ui.commands.PromptChoice('f', 'Foo', None),
- ui.commands.PromptChoice('r', 'baR', None)]
+ return [
+ ui.commands.PromptChoice("f", "Foo", None),
+ ui.commands.PromptChoice("r", "baR", None),
+ ]
self.register_plugin(DummyPlugin)
# Default options + extra choices by the plugin ('Foo', 'Bar')
- opts = ('Apply', 'More candidates', 'Skip', 'Use as-is',
- 'Enter search',
- 'enter Id', 'aBort') + ('Foo', 'baR')
-
- config['import']['singletons'] = True
+ opts = (
+ "Apply",
+ "More candidates",
+ "Skip",
+ "Use as-is",
+ "Enter search",
+ "enter Id",
+ "aBort",
+ ) + ("Foo", "baR")
+
+ config["import"]["singletons"] = True
self.importer.add_choice(action.SKIP)
self.importer.run()
- self.mock_input_options.assert_called_with(opts, default='a',
- require=ANY)
+ self.mock_input_options.assert_called_with(
+ opts, default="a", require=ANY
+ )
def test_choices_conflicts(self):
"""Test the short letter conflict solving."""
+
class DummyPlugin(plugins.BeetsPlugin):
def __init__(self):
super().__init__()
- self.register_listener('before_choose_candidate',
- self.return_choices)
+ self.register_listener(
+ "before_choose_candidate", self.return_choices
+ )
def return_choices(self, session, task):
- return [ui.commands.PromptChoice('a', 'A foo', None), # dupe
- ui.commands.PromptChoice('z', 'baZ', None), # ok
- ui.commands.PromptChoice('z', 'Zupe', None), # dupe
- ui.commands.PromptChoice('z', 'Zoo', None)] # dupe
+ return [
+ ui.commands.PromptChoice("a", "A foo", None), # dupe
+ ui.commands.PromptChoice("z", "baZ", None), # ok
+                    ui.commands.PromptChoice("z", "Zupe", None),  # dupe
+                    ui.commands.PromptChoice("z", "Zoo", None),  # dupe
+                ]
self.register_plugin(DummyPlugin)
# Default options + not dupe extra choices by the plugin ('baZ')
- opts = ('Apply', 'More candidates', 'Skip', 'Use as-is',
- 'as Tracks', 'Group albums', 'Enter search',
- 'enter Id', 'aBort') + ('baZ',)
+ opts = (
+ "Apply",
+ "More candidates",
+ "Skip",
+ "Use as-is",
+ "as Tracks",
+ "Group albums",
+ "Enter search",
+ "enter Id",
+ "aBort",
+ ) + ("baZ",)
self.importer.add_choice(action.SKIP)
self.importer.run()
- self.mock_input_options.assert_called_once_with(opts, default='a',
- require=ANY)
+ self.mock_input_options.assert_called_once_with(
+ opts, default="a", require=ANY
+ )
def test_plugin_callback(self):
"""Test that plugin callbacks are being called upon user choice."""
+
class DummyPlugin(plugins.BeetsPlugin):
def __init__(self):
super().__init__()
- self.register_listener('before_choose_candidate',
- self.return_choices)
+ self.register_listener(
+ "before_choose_candidate", self.return_choices
+ )
def return_choices(self, session, task):
- return [ui.commands.PromptChoice('f', 'Foo', self.foo)]
+ return [ui.commands.PromptChoice("f", "Foo", self.foo)]
def foo(self, session, task):
pass
self.register_plugin(DummyPlugin)
# Default options + extra choices by the plugin ('Foo', 'Bar')
- opts = ('Apply', 'More candidates', 'Skip', 'Use as-is',
- 'as Tracks', 'Group albums', 'Enter search',
- 'enter Id', 'aBort') + ('Foo',)
+ opts = (
+ "Apply",
+ "More candidates",
+ "Skip",
+ "Use as-is",
+ "as Tracks",
+ "Group albums",
+ "Enter search",
+ "enter Id",
+ "aBort",
+ ) + ("Foo",)
# DummyPlugin.foo() should be called once
- with patch.object(DummyPlugin, 'foo', autospec=True) as mock_foo:
- with helper.control_stdin('\n'.join(['f', 's'])):
+ with patch.object(DummyPlugin, "foo", autospec=True) as mock_foo:
+ with helper.control_stdin("\n".join(["f", "s"])):
self.importer.run()
self.assertEqual(mock_foo.call_count, 1)
# input_options should be called twice, as foo() returns None
self.assertEqual(self.mock_input_options.call_count, 2)
- self.mock_input_options.assert_called_with(opts, default='a',
- require=ANY)
+ self.mock_input_options.assert_called_with(
+ opts, default="a", require=ANY
+ )
def test_plugin_callback_return(self):
"""Test that plugin callbacks that return a value exit the loop."""
+
class DummyPlugin(plugins.BeetsPlugin):
def __init__(self):
super().__init__()
- self.register_listener('before_choose_candidate',
- self.return_choices)
+ self.register_listener(
+ "before_choose_candidate", self.return_choices
+ )
def return_choices(self, session, task):
- return [ui.commands.PromptChoice('f', 'Foo', self.foo)]
+ return [ui.commands.PromptChoice("f", "Foo", self.foo)]
def foo(self, session, task):
return action.SKIP
self.register_plugin(DummyPlugin)
# Default options + extra choices by the plugin ('Foo', 'Bar')
- opts = ('Apply', 'More candidates', 'Skip', 'Use as-is',
- 'as Tracks', 'Group albums', 'Enter search',
- 'enter Id', 'aBort') + ('Foo',)
+ opts = (
+ "Apply",
+ "More candidates",
+ "Skip",
+ "Use as-is",
+ "as Tracks",
+ "Group albums",
+ "Enter search",
+ "enter Id",
+ "aBort",
+ ) + ("Foo",)
# DummyPlugin.foo() should be called once
- with helper.control_stdin('f\n'):
+ with helper.control_stdin("f\n"):
self.importer.run()
# input_options should be called once, as foo() returns SKIP
- self.mock_input_options.assert_called_once_with(opts, default='a',
- require=ANY)
+ self.mock_input_options.assert_called_once_with(
+ opts, default="a", require=ANY
+ )
class ParseSpotifyIDTest(unittest.TestCase):
def test_parse_id_correct(self):
id_string = "39WqpoPgZxygo6YQjehLJJ"
- out = MetadataSourcePlugin._get_id(
- "album", id_string, spotify_id_regex)
+ out = MetadataSourcePlugin._get_id("album", id_string, spotify_id_regex)
self.assertEqual(out, id_string)
def test_parse_id_non_id_returns_none(self):
id_string = "blah blah"
- out = MetadataSourcePlugin._get_id(
- "album", id_string, spotify_id_regex)
+ out = MetadataSourcePlugin._get_id("album", id_string, spotify_id_regex)
self.assertEqual(out, None)
def test_parse_id_url_finds_id(self):
id_string = "39WqpoPgZxygo6YQjehLJJ"
id_url = "https://open.spotify.com/album/%s" % id_string
- out = MetadataSourcePlugin._get_id(
- "album", id_url, spotify_id_regex)
+ out = MetadataSourcePlugin._get_id("album", id_url, spotify_id_regex)
self.assertEqual(out, id_string)
class ParseDeezerIDTest(unittest.TestCase):
def test_parse_id_correct(self):
id_string = "176356382"
- out = MetadataSourcePlugin._get_id(
- "album", id_string, deezer_id_regex)
+ out = MetadataSourcePlugin._get_id("album", id_string, deezer_id_regex)
self.assertEqual(out, id_string)
def test_parse_id_non_id_returns_none(self):
id_string = "blah blah"
- out = MetadataSourcePlugin._get_id(
- "album", id_string, deezer_id_regex)
+ out = MetadataSourcePlugin._get_id("album", id_string, deezer_id_regex)
self.assertEqual(out, None)
def test_parse_id_url_finds_id(self):
id_string = "176356382"
id_url = "https://www.deezer.com/album/%s" % id_string
- out = MetadataSourcePlugin._get_id(
- "album", id_url, deezer_id_regex)
+ out = MetadataSourcePlugin._get_id("album", id_url, deezer_id_regex)
self.assertEqual(out, id_string)
@@ -611,25 +675,27 @@ class ParseBeatportIDTest(unittest.TestCase):
def test_parse_id_correct(self):
id_string = "3089651"
out = MetadataSourcePlugin._get_id(
- "album", id_string, beatport_id_regex)
+ "album", id_string, beatport_id_regex
+ )
self.assertEqual(out, id_string)
def test_parse_id_non_id_returns_none(self):
id_string = "blah blah"
out = MetadataSourcePlugin._get_id(
- "album", id_string, beatport_id_regex)
+ "album", id_string, beatport_id_regex
+ )
self.assertEqual(out, None)
def test_parse_id_url_finds_id(self):
id_string = "3089651"
id_url = "https://www.beatport.com/release/album-name/%s" % id_string
- out = MetadataSourcePlugin._get_id(
- "album", id_url, beatport_id_regex)
+ out = MetadataSourcePlugin._get_id("album", id_url, beatport_id_regex)
self.assertEqual(out, id_string)
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/test_query.py b/test/test_query.py
index 5f8450c496..a6538f7caa 100644
--- a/test/test_query.py
+++ b/test/test_query.py
@@ -15,31 +15,30 @@
"""Various tests for querying the library database.
"""
-from contextlib import contextmanager
-from functools import partial
import os
import sys
import unittest
-
-from test import _common
-from test import helper
+from contextlib import contextmanager
+from functools import partial
+from test import _common, helper
import beets.library
-from beets import dbcore
+from beets import dbcore, util
from beets.dbcore import types
-from beets.dbcore.query import (NoneQuery, ParsingError,
- InvalidQueryArgumentValueError)
-from beets.library import Library, Item
-from beets import util
+from beets.dbcore.query import (
+ InvalidQueryArgumentValueError,
+ NoneQuery,
+ ParsingError,
+)
+from beets.library import Item, Library
from beets.util import syspath
# Because the absolute path begins with something like C:, we
# can't disambiguate it from an ordinary query.
-WIN32_NO_IMPLICIT_PATHS = 'Implicit paths are not supported on Windows'
+WIN32_NO_IMPLICIT_PATHS = "Implicit paths are not supported on Windows"
class TestHelper(helper.TestHelper):
-
def assertInResult(self, item, results): # noqa
result_ids = [i.id for i in results]
self.assertIn(item.id, result_ids)
@@ -52,26 +51,31 @@ def assertNotInResult(self, item, results): # noqa
class AnyFieldQueryTest(_common.LibTestCase):
def test_no_restriction(self):
q = dbcore.query.AnyFieldQuery(
- 'title', beets.library.Item._fields.keys(),
- dbcore.query.SubstringQuery
+ "title",
+ beets.library.Item._fields.keys(),
+ dbcore.query.SubstringQuery,
)
- self.assertEqual(self.lib.items(q).get().title, 'the title')
+ self.assertEqual(self.lib.items(q).get().title, "the title")
def test_restriction_completeness(self):
- q = dbcore.query.AnyFieldQuery('title', ['title'],
- dbcore.query.SubstringQuery)
- self.assertEqual(self.lib.items(q).get().title, 'the title')
+ q = dbcore.query.AnyFieldQuery(
+ "title", ["title"], dbcore.query.SubstringQuery
+ )
+ self.assertEqual(self.lib.items(q).get().title, "the title")
def test_restriction_soundness(self):
- q = dbcore.query.AnyFieldQuery('title', ['artist'],
- dbcore.query.SubstringQuery)
+ q = dbcore.query.AnyFieldQuery(
+ "title", ["artist"], dbcore.query.SubstringQuery
+ )
self.assertEqual(self.lib.items(q).get(), None)
def test_eq(self):
- q1 = dbcore.query.AnyFieldQuery('foo', ['bar'],
- dbcore.query.SubstringQuery)
- q2 = dbcore.query.AnyFieldQuery('foo', ['bar'],
- dbcore.query.SubstringQuery)
+ q1 = dbcore.query.AnyFieldQuery(
+ "foo", ["bar"], dbcore.query.SubstringQuery
+ )
+ q2 = dbcore.query.AnyFieldQuery(
+ "foo", ["bar"], dbcore.query.SubstringQuery
+ )
self.assertEqual(q1, q2)
q2.query_class = None
@@ -91,44 +95,47 @@ def assert_albums_matched(self, results, albums):
class DummyDataTestCase(_common.TestCase, AssertsMixin):
def setUp(self):
super().setUp()
- self.lib = beets.library.Library(':memory:')
+ self.lib = beets.library.Library(":memory:")
items = [_common.item() for _ in range(3)]
- items[0].title = 'foo bar'
- items[0].artist = 'one'
- items[0].artists = ['one', 'eleven']
- items[0].album = 'baz'
+ items[0].title = "foo bar"
+ items[0].artist = "one"
+ items[0].artists = ["one", "eleven"]
+ items[0].album = "baz"
items[0].year = 2001
items[0].comp = True
- items[0].genre = 'rock'
- items[1].title = 'baz qux'
- items[1].artist = 'two'
- items[1].artists = ['two', 'twelve']
- items[1].album = 'baz'
+ items[0].genre = "rock"
+ items[1].title = "baz qux"
+ items[1].artist = "two"
+ items[1].artists = ["two", "twelve"]
+ items[1].album = "baz"
items[1].year = 2002
items[1].comp = True
- items[1].genre = 'Rock'
- items[2].title = 'beets 4 eva'
- items[2].artist = 'three'
- items[2].artists = ['three', 'one']
- items[2].album = 'foo'
+ items[1].genre = "Rock"
+ items[2].title = "beets 4 eva"
+ items[2].artist = "three"
+ items[2].artists = ["three", "one"]
+ items[2].album = "foo"
items[2].year = 2003
items[2].comp = False
- items[2].genre = 'Hard Rock'
+ items[2].genre = "Hard Rock"
for item in items:
self.lib.add(item)
self.album = self.lib.add_album(items[:2])
def assert_items_matched_all(self, results):
- self.assert_items_matched(results, [
- 'foo bar',
- 'baz qux',
- 'beets 4 eva',
- ])
+ self.assert_items_matched(
+ results,
+ [
+ "foo bar",
+ "baz qux",
+ "beets 4 eva",
+ ],
+ )
class GetTest(DummyDataTestCase):
def test_get_empty(self):
- q = ''
+ q = ""
results = self.lib.items(q)
self.assert_items_matched_all(results)
@@ -138,58 +145,58 @@ def test_get_none(self):
self.assert_items_matched_all(results)
def test_get_one_keyed_term(self):
- q = 'title:qux'
+ q = "title:qux"
results = self.lib.items(q)
- self.assert_items_matched(results, ['baz qux'])
+ self.assert_items_matched(results, ["baz qux"])
def test_get_one_keyed_exact(self):
- q = 'genre:=rock'
+ q = "genre:=rock"
results = self.lib.items(q)
- self.assert_items_matched(results, ['foo bar'])
- q = 'genre:=Rock'
+ self.assert_items_matched(results, ["foo bar"])
+ q = "genre:=Rock"
results = self.lib.items(q)
- self.assert_items_matched(results, ['baz qux'])
+ self.assert_items_matched(results, ["baz qux"])
q = 'genre:="Hard Rock"'
results = self.lib.items(q)
- self.assert_items_matched(results, ['beets 4 eva'])
+ self.assert_items_matched(results, ["beets 4 eva"])
def test_get_one_keyed_exact_nocase(self):
q = 'genre:=~"hard rock"'
results = self.lib.items(q)
- self.assert_items_matched(results, ['beets 4 eva'])
+ self.assert_items_matched(results, ["beets 4 eva"])
def test_get_one_keyed_regexp(self):
- q = 'artist::t.+r'
+ q = "artist::t.+r"
results = self.lib.items(q)
- self.assert_items_matched(results, ['beets 4 eva'])
+ self.assert_items_matched(results, ["beets 4 eva"])
def test_get_one_unkeyed_term(self):
- q = 'three'
+ q = "three"
results = self.lib.items(q)
- self.assert_items_matched(results, ['beets 4 eva'])
+ self.assert_items_matched(results, ["beets 4 eva"])
def test_get_one_unkeyed_exact(self):
- q = '=rock'
+ q = "=rock"
results = self.lib.items(q)
- self.assert_items_matched(results, ['foo bar'])
+ self.assert_items_matched(results, ["foo bar"])
def test_get_one_unkeyed_exact_nocase(self):
q = '=~"hard rock"'
results = self.lib.items(q)
- self.assert_items_matched(results, ['beets 4 eva'])
+ self.assert_items_matched(results, ["beets 4 eva"])
def test_get_one_unkeyed_regexp(self):
- q = ':x$'
+ q = ":x$"
results = self.lib.items(q)
- self.assert_items_matched(results, ['baz qux'])
+ self.assert_items_matched(results, ["baz qux"])
def test_get_no_matches(self):
- q = 'popebear'
+ q = "popebear"
results = self.lib.items(q)
self.assert_items_matched(results, [])
def test_invalid_key(self):
- q = 'pope:bear'
+ q = "pope:bear"
results = self.lib.items(q)
# Matches nothing since the flexattr is not present on the
# objects.
@@ -201,192 +208,213 @@ def test_get_no_matches_exact(self):
self.assert_items_matched(results, [])
def test_term_case_insensitive(self):
- q = 'oNE'
+ q = "oNE"
results = self.lib.items(q)
- self.assert_items_matched(results, ['foo bar'])
+ self.assert_items_matched(results, ["foo bar"])
def test_regexp_case_sensitive(self):
- q = ':oNE'
+ q = ":oNE"
results = self.lib.items(q)
self.assert_items_matched(results, [])
- q = ':one'
+ q = ":one"
results = self.lib.items(q)
- self.assert_items_matched(results, ['foo bar'])
+ self.assert_items_matched(results, ["foo bar"])
def test_term_case_insensitive_with_key(self):
- q = 'artist:thrEE'
+ q = "artist:thrEE"
results = self.lib.items(q)
- self.assert_items_matched(results, ['beets 4 eva'])
+ self.assert_items_matched(results, ["beets 4 eva"])
def test_term_case_regex_with_multi_key_matches(self):
- q = 'artists::eleven'
+ q = "artists::eleven"
results = self.lib.items(q)
- self.assert_items_matched(results, ['foo bar'])
+ self.assert_items_matched(results, ["foo bar"])
def test_term_case_regex_with_multi_key_matches_multiple_columns(self):
- q = 'artists::one'
+ q = "artists::one"
results = self.lib.items(q)
- self.assert_items_matched(results, ['foo bar', 'beets 4 eva'])
+ self.assert_items_matched(results, ["foo bar", "beets 4 eva"])
def test_key_case_insensitive(self):
- q = 'ArTiST:three'
+ q = "ArTiST:three"
results = self.lib.items(q)
- self.assert_items_matched(results, ['beets 4 eva'])
+ self.assert_items_matched(results, ["beets 4 eva"])
def test_keyed_matches_exact_nocase(self):
- q = 'genre:=~rock'
+ q = "genre:=~rock"
results = self.lib.items(q)
- self.assert_items_matched(results, [
- 'foo bar',
- 'baz qux',
- ])
+ self.assert_items_matched(
+ results,
+ [
+ "foo bar",
+ "baz qux",
+ ],
+ )
def test_unkeyed_term_matches_multiple_columns(self):
- q = 'baz'
+ q = "baz"
results = self.lib.items(q)
- self.assert_items_matched(results, [
- 'foo bar',
- 'baz qux',
- ])
+ self.assert_items_matched(
+ results,
+ [
+ "foo bar",
+ "baz qux",
+ ],
+ )
def test_unkeyed_regexp_matches_multiple_columns(self):
- q = ':z$'
+ q = ":z$"
results = self.lib.items(q)
- self.assert_items_matched(results, [
- 'foo bar',
- 'baz qux',
- ])
+ self.assert_items_matched(
+ results,
+ [
+ "foo bar",
+ "baz qux",
+ ],
+ )
def test_keyed_term_matches_only_one_column(self):
- q = 'title:baz'
+ q = "title:baz"
results = self.lib.items(q)
- self.assert_items_matched(results, ['baz qux'])
+ self.assert_items_matched(results, ["baz qux"])
def test_keyed_regexp_matches_only_one_column(self):
- q = 'title::baz'
+ q = "title::baz"
results = self.lib.items(q)
- self.assert_items_matched(results, [
- 'baz qux',
- ])
+ self.assert_items_matched(
+ results,
+ [
+ "baz qux",
+ ],
+ )
def test_multiple_terms_narrow_search(self):
- q = 'qux baz'
+ q = "qux baz"
results = self.lib.items(q)
- self.assert_items_matched(results, [
- 'baz qux',
- ])
+ self.assert_items_matched(
+ results,
+ [
+ "baz qux",
+ ],
+ )
def test_multiple_regexps_narrow_search(self):
- q = ':baz :qux'
+ q = ":baz :qux"
results = self.lib.items(q)
- self.assert_items_matched(results, ['baz qux'])
+ self.assert_items_matched(results, ["baz qux"])
def test_mixed_terms_regexps_narrow_search(self):
- q = ':baz qux'
+ q = ":baz qux"
results = self.lib.items(q)
- self.assert_items_matched(results, ['baz qux'])
+ self.assert_items_matched(results, ["baz qux"])
def test_single_year(self):
- q = 'year:2001'
+ q = "year:2001"
results = self.lib.items(q)
- self.assert_items_matched(results, ['foo bar'])
+ self.assert_items_matched(results, ["foo bar"])
def test_year_range(self):
- q = 'year:2000..2002'
+ q = "year:2000..2002"
results = self.lib.items(q)
- self.assert_items_matched(results, [
- 'foo bar',
- 'baz qux',
- ])
+ self.assert_items_matched(
+ results,
+ [
+ "foo bar",
+ "baz qux",
+ ],
+ )
def test_singleton_true(self):
- q = 'singleton:true'
+ q = "singleton:true"
results = self.lib.items(q)
- self.assert_items_matched(results, ['beets 4 eva'])
+ self.assert_items_matched(results, ["beets 4 eva"])
def test_singleton_1(self):
- q = 'singleton:1'
+ q = "singleton:1"
results = self.lib.items(q)
- self.assert_items_matched(results, ['beets 4 eva'])
+ self.assert_items_matched(results, ["beets 4 eva"])
def test_singleton_false(self):
- q = 'singleton:false'
+ q = "singleton:false"
results = self.lib.items(q)
- self.assert_items_matched(results, ['foo bar', 'baz qux'])
+ self.assert_items_matched(results, ["foo bar", "baz qux"])
def test_singleton_0(self):
- q = 'singleton:0'
+ q = "singleton:0"
results = self.lib.items(q)
- self.assert_items_matched(results, ['foo bar', 'baz qux'])
+ self.assert_items_matched(results, ["foo bar", "baz qux"])
def test_compilation_true(self):
- q = 'comp:true'
+ q = "comp:true"
results = self.lib.items(q)
- self.assert_items_matched(results, ['foo bar', 'baz qux'])
+ self.assert_items_matched(results, ["foo bar", "baz qux"])
def test_compilation_false(self):
- q = 'comp:false'
+ q = "comp:false"
results = self.lib.items(q)
- self.assert_items_matched(results, ['beets 4 eva'])
+ self.assert_items_matched(results, ["beets 4 eva"])
def test_unknown_field_name_no_results(self):
- q = 'xyzzy:nonsense'
+ q = "xyzzy:nonsense"
results = self.lib.items(q)
titles = [i.title for i in results]
self.assertEqual(titles, [])
def test_unknown_field_name_no_results_in_album_query(self):
- q = 'xyzzy:nonsense'
+ q = "xyzzy:nonsense"
results = self.lib.albums(q)
names = [a.album for a in results]
self.assertEqual(names, [])
def test_item_field_name_matches_nothing_in_album_query(self):
- q = 'format:nonsense'
+ q = "format:nonsense"
results = self.lib.albums(q)
names = [a.album for a in results]
self.assertEqual(names, [])
def test_unicode_query(self):
item = self.lib.items().get()
- item.title = 'caf\xe9'
+ item.title = "caf\xe9"
item.store()
- q = 'title:caf\xe9'
+ q = "title:caf\xe9"
results = self.lib.items(q)
- self.assert_items_matched(results, ['caf\xe9'])
+ self.assert_items_matched(results, ["caf\xe9"])
def test_numeric_search_positive(self):
- q = dbcore.query.NumericQuery('year', '2001')
+ q = dbcore.query.NumericQuery("year", "2001")
results = self.lib.items(q)
self.assertTrue(results)
def test_numeric_search_negative(self):
- q = dbcore.query.NumericQuery('year', '1999')
+ q = dbcore.query.NumericQuery("year", "1999")
results = self.lib.items(q)
self.assertFalse(results)
def test_album_field_fallback(self):
- self.album['albumflex'] = 'foo'
+ self.album["albumflex"] = "foo"
self.album.store()
- q = 'albumflex:foo'
+ q = "albumflex:foo"
results = self.lib.items(q)
- self.assert_items_matched(results, [
- 'foo bar',
- 'baz qux',
- ])
+ self.assert_items_matched(
+ results,
+ [
+ "foo bar",
+ "baz qux",
+ ],
+ )
def test_invalid_query(self):
with self.assertRaises(InvalidQueryArgumentValueError) as raised:
- dbcore.query.NumericQuery('year', '199a')
- self.assertIn('not an int', str(raised.exception))
+ dbcore.query.NumericQuery("year", "199a")
+ self.assertIn("not an int", str(raised.exception))
with self.assertRaises(InvalidQueryArgumentValueError) as raised:
- dbcore.query.RegexpQuery('year', '199(')
+ dbcore.query.RegexpQuery("year", "199(")
exception_text = str(raised.exception)
- self.assertIn('not a regular expression', exception_text)
- self.assertIn('unterminated subpattern', exception_text)
+ self.assertIn("not a regular expression", exception_text)
+ self.assertIn("unterminated subpattern", exception_text)
self.assertIsInstance(raised.exception, ParsingError)
@@ -396,63 +424,63 @@ def setUp(self):
self.item = _common.item()
def test_regex_match_positive(self):
- q = dbcore.query.RegexpQuery('album', '^the album$')
+ q = dbcore.query.RegexpQuery("album", "^the album$")
self.assertTrue(q.match(self.item))
def test_regex_match_negative(self):
- q = dbcore.query.RegexpQuery('album', '^album$')
+ q = dbcore.query.RegexpQuery("album", "^album$")
self.assertFalse(q.match(self.item))
def test_regex_match_non_string_value(self):
- q = dbcore.query.RegexpQuery('disc', '^6$')
+ q = dbcore.query.RegexpQuery("disc", "^6$")
self.assertTrue(q.match(self.item))
def test_substring_match_positive(self):
- q = dbcore.query.SubstringQuery('album', 'album')
+ q = dbcore.query.SubstringQuery("album", "album")
self.assertTrue(q.match(self.item))
def test_substring_match_negative(self):
- q = dbcore.query.SubstringQuery('album', 'ablum')
+ q = dbcore.query.SubstringQuery("album", "ablum")
self.assertFalse(q.match(self.item))
def test_substring_match_non_string_value(self):
- q = dbcore.query.SubstringQuery('disc', '6')
+ q = dbcore.query.SubstringQuery("disc", "6")
self.assertTrue(q.match(self.item))
def test_exact_match_nocase_positive(self):
- q = dbcore.query.StringQuery('genre', 'the genre')
+ q = dbcore.query.StringQuery("genre", "the genre")
self.assertTrue(q.match(self.item))
- q = dbcore.query.StringQuery('genre', 'THE GENRE')
+ q = dbcore.query.StringQuery("genre", "THE GENRE")
self.assertTrue(q.match(self.item))
def test_exact_match_nocase_negative(self):
- q = dbcore.query.StringQuery('genre', 'genre')
+ q = dbcore.query.StringQuery("genre", "genre")
self.assertFalse(q.match(self.item))
def test_year_match_positive(self):
- q = dbcore.query.NumericQuery('year', '1')
+ q = dbcore.query.NumericQuery("year", "1")
self.assertTrue(q.match(self.item))
def test_year_match_negative(self):
- q = dbcore.query.NumericQuery('year', '10')
+ q = dbcore.query.NumericQuery("year", "10")
self.assertFalse(q.match(self.item))
def test_bitrate_range_positive(self):
- q = dbcore.query.NumericQuery('bitrate', '100000..200000')
+ q = dbcore.query.NumericQuery("bitrate", "100000..200000")
self.assertTrue(q.match(self.item))
def test_bitrate_range_negative(self):
- q = dbcore.query.NumericQuery('bitrate', '200000..300000')
+ q = dbcore.query.NumericQuery("bitrate", "200000..300000")
self.assertFalse(q.match(self.item))
def test_open_range(self):
- dbcore.query.NumericQuery('bitrate', '100000..')
+ dbcore.query.NumericQuery("bitrate", "100000..")
def test_eq(self):
- q1 = dbcore.query.MatchQuery('foo', 'bar')
- q2 = dbcore.query.MatchQuery('foo', 'bar')
- q3 = dbcore.query.MatchQuery('foo', 'baz')
- q4 = dbcore.query.StringFieldQuery('foo', 'bar')
+ q1 = dbcore.query.MatchQuery("foo", "bar")
+ q2 = dbcore.query.MatchQuery("foo", "bar")
+ q3 = dbcore.query.MatchQuery("foo", "baz")
+ q4 = dbcore.query.StringFieldQuery("foo", "bar")
self.assertEqual(q1, q2)
self.assertNotEqual(q1, q3)
self.assertNotEqual(q1, q4)
@@ -464,17 +492,17 @@ def setUp(self):
super().setUp()
# This is the item we'll try to match.
- self.i.path = util.normpath('/a/b/c.mp3')
- self.i.title = 'path item'
- self.i.album = 'path album'
+ self.i.path = util.normpath("/a/b/c.mp3")
+ self.i.title = "path item"
+ self.i.album = "path album"
self.i.store()
self.lib.add_album([self.i])
# A second item for testing exclusion.
i2 = _common.item()
- i2.path = util.normpath('/x/y/z.mp3')
- i2.title = 'another item'
- i2.album = 'another album'
+ i2.path = util.normpath("/x/y/z.mp3")
+ i2.title = "another item"
+ i2.album = "another album"
self.lib.add(i2)
self.lib.add_album([i2])
@@ -488,35 +516,35 @@ def force_implicit_query_detection(self):
beets.library.PathQuery.force_implicit_query_detection = False
def test_path_exact_match(self):
- q = 'path:/a/b/c.mp3'
+ q = "path:/a/b/c.mp3"
results = self.lib.items(q)
- self.assert_items_matched(results, ['path item'])
+ self.assert_items_matched(results, ["path item"])
results = self.lib.albums(q)
self.assert_albums_matched(results, [])
# FIXME: fails on windows
- @unittest.skipIf(sys.platform == 'win32', 'win32')
+ @unittest.skipIf(sys.platform == "win32", "win32")
def test_parent_directory_no_slash(self):
- q = 'path:/a'
+ q = "path:/a"
results = self.lib.items(q)
- self.assert_items_matched(results, ['path item'])
+ self.assert_items_matched(results, ["path item"])
results = self.lib.albums(q)
- self.assert_albums_matched(results, ['path album'])
+ self.assert_albums_matched(results, ["path album"])
# FIXME: fails on windows
- @unittest.skipIf(sys.platform == 'win32', 'win32')
+ @unittest.skipIf(sys.platform == "win32", "win32")
def test_parent_directory_with_slash(self):
- q = 'path:/a/'
+ q = "path:/a/"
results = self.lib.items(q)
- self.assert_items_matched(results, ['path item'])
+ self.assert_items_matched(results, ["path item"])
results = self.lib.albums(q)
- self.assert_albums_matched(results, ['path album'])
+ self.assert_albums_matched(results, ["path album"])
def test_no_match(self):
- q = 'path:/xyzzy/'
+ q = "path:/xyzzy/"
results = self.lib.items(q)
self.assert_items_matched(results, [])
@@ -524,7 +552,7 @@ def test_no_match(self):
self.assert_albums_matched(results, [])
def test_fragment_no_match(self):
- q = 'path:/b/'
+ q = "path:/b/"
results = self.lib.items(q)
self.assert_items_matched(results, [])
@@ -532,33 +560,33 @@ def test_fragment_no_match(self):
self.assert_albums_matched(results, [])
def test_nonnorm_path(self):
- q = 'path:/x/../a/b'
+ q = "path:/x/../a/b"
results = self.lib.items(q)
- self.assert_items_matched(results, ['path item'])
+ self.assert_items_matched(results, ["path item"])
results = self.lib.albums(q)
- self.assert_albums_matched(results, ['path album'])
+ self.assert_albums_matched(results, ["path album"])
- @unittest.skipIf(sys.platform == 'win32', WIN32_NO_IMPLICIT_PATHS)
+ @unittest.skipIf(sys.platform == "win32", WIN32_NO_IMPLICIT_PATHS)
def test_slashed_query_matches_path(self):
with self.force_implicit_query_detection():
- q = '/a/b'
+ q = "/a/b"
results = self.lib.items(q)
- self.assert_items_matched(results, ['path item'])
+ self.assert_items_matched(results, ["path item"])
results = self.lib.albums(q)
- self.assert_albums_matched(results, ['path album'])
+ self.assert_albums_matched(results, ["path album"])
- @unittest.skipIf(sys.platform == 'win32', WIN32_NO_IMPLICIT_PATHS)
+ @unittest.skipIf(sys.platform == "win32", WIN32_NO_IMPLICIT_PATHS)
def test_path_query_in_or_query(self):
with self.force_implicit_query_detection():
- q = '/a/b , /a/b'
+ q = "/a/b , /a/b"
results = self.lib.items(q)
- self.assert_items_matched(results, ['path item'])
+ self.assert_items_matched(results, ["path item"])
def test_non_slashed_does_not_match_path(self):
with self.force_implicit_query_detection():
- q = 'c.mp3'
+ q = "c.mp3"
results = self.lib.items(q)
self.assert_items_matched(results, [])
@@ -567,78 +595,87 @@ def test_non_slashed_does_not_match_path(self):
def test_slashes_in_explicit_field_does_not_match_path(self):
with self.force_implicit_query_detection():
- q = 'title:/a/b'
+ q = "title:/a/b"
results = self.lib.items(q)
self.assert_items_matched(results, [])
def test_path_item_regex(self):
- q = 'path::c\\.mp3$'
+ q = "path::c\\.mp3$"
results = self.lib.items(q)
- self.assert_items_matched(results, ['path item'])
+ self.assert_items_matched(results, ["path item"])
def test_path_album_regex(self):
- q = 'path::b'
+ q = "path::b"
results = self.lib.albums(q)
- self.assert_albums_matched(results, ['path album'])
+ self.assert_albums_matched(results, ["path album"])
def test_escape_underscore(self):
- self.add_album(path=b'/a/_/title.mp3', title='with underscore',
- album='album with underscore')
- q = 'path:/a/_'
+ self.add_album(
+ path=b"/a/_/title.mp3",
+ title="with underscore",
+ album="album with underscore",
+ )
+ q = "path:/a/_"
results = self.lib.items(q)
- self.assert_items_matched(results, ['with underscore'])
+ self.assert_items_matched(results, ["with underscore"])
results = self.lib.albums(q)
- self.assert_albums_matched(results, ['album with underscore'])
+ self.assert_albums_matched(results, ["album with underscore"])
def test_escape_percent(self):
- self.add_album(path=b'/a/%/title.mp3', title='with percent',
- album='album with percent')
- q = 'path:/a/%'
+ self.add_album(
+ path=b"/a/%/title.mp3",
+ title="with percent",
+ album="album with percent",
+ )
+ q = "path:/a/%"
results = self.lib.items(q)
- self.assert_items_matched(results, ['with percent'])
+ self.assert_items_matched(results, ["with percent"])
results = self.lib.albums(q)
- self.assert_albums_matched(results, ['album with percent'])
+ self.assert_albums_matched(results, ["album with percent"])
def test_escape_backslash(self):
- self.add_album(path=br'/a/\x/title.mp3', title='with backslash',
- album='album with backslash')
- q = 'path:/a/\\\\x'
+ self.add_album(
+ path=rb"/a/\x/title.mp3",
+ title="with backslash",
+ album="album with backslash",
+ )
+ q = "path:/a/\\\\x"
results = self.lib.items(q)
- self.assert_items_matched(results, ['with backslash'])
+ self.assert_items_matched(results, ["with backslash"])
results = self.lib.albums(q)
- self.assert_albums_matched(results, ['album with backslash'])
+ self.assert_albums_matched(results, ["album with backslash"])
def test_case_sensitivity(self):
- self.add_album(path=b'/A/B/C2.mp3', title='caps path')
+ self.add_album(path=b"/A/B/C2.mp3", title="caps path")
- makeq = partial(beets.library.PathQuery, 'path', '/A/B')
+ makeq = partial(beets.library.PathQuery, "path", "/A/B")
results = self.lib.items(makeq(case_sensitive=True))
- self.assert_items_matched(results, ['caps path'])
+ self.assert_items_matched(results, ["caps path"])
results = self.lib.items(makeq(case_sensitive=False))
- self.assert_items_matched(results, ['path item', 'caps path'])
+ self.assert_items_matched(results, ["path item", "caps path"])
# FIXME: Also create a variant of this test for windows, which tests
# both os.sep and os.altsep
- @unittest.skipIf(sys.platform == 'win32', 'win32')
+ @unittest.skipIf(sys.platform == "win32", "win32")
def test_path_sep_detection(self):
is_path_query = beets.library.PathQuery.is_path_query
with self.force_implicit_query_detection():
- self.assertTrue(is_path_query('/foo/bar'))
- self.assertTrue(is_path_query('foo/bar'))
- self.assertTrue(is_path_query('foo/'))
- self.assertFalse(is_path_query('foo'))
- self.assertTrue(is_path_query('foo/:bar'))
- self.assertFalse(is_path_query('foo:bar/'))
- self.assertFalse(is_path_query('foo:/bar'))
+ self.assertTrue(is_path_query("/foo/bar"))
+ self.assertTrue(is_path_query("foo/bar"))
+ self.assertTrue(is_path_query("foo/"))
+ self.assertFalse(is_path_query("foo"))
+ self.assertTrue(is_path_query("foo/:bar"))
+ self.assertFalse(is_path_query("foo:bar/"))
+ self.assertFalse(is_path_query("foo:/bar"))
# FIXME: shouldn't this also work on windows?
- @unittest.skipIf(sys.platform == 'win32', WIN32_NO_IMPLICIT_PATHS)
+ @unittest.skipIf(sys.platform == "win32", WIN32_NO_IMPLICIT_PATHS)
def test_detect_absolute_path(self):
"""Test detection of implicit path queries based on whether or
not the path actually exists, when using an absolute path query.
@@ -648,9 +685,9 @@ def test_detect_absolute_path(self):
"""
is_path_query = beets.library.PathQuery.is_path_query
- path = self.touch(os.path.join(b'foo', b'bar'))
+ path = self.touch(os.path.join(b"foo", b"bar"))
self.assertTrue(os.path.isabs(util.syspath(path)))
- path_str = path.decode('utf-8')
+ path_str = path.decode("utf-8")
# The file itself.
self.assertTrue(is_path_query(path_str))
@@ -660,7 +697,7 @@ def test_detect_absolute_path(self):
self.assertTrue(is_path_query(parent))
# Some non-existent path.
- self.assertFalse(is_path_query(path_str + 'baz'))
+ self.assertFalse(is_path_query(path_str + "baz"))
def test_detect_relative_path(self):
"""Test detection of implicit path queries based on whether or
@@ -671,64 +708,62 @@ def test_detect_relative_path(self):
"""
is_path_query = beets.library.PathQuery.is_path_query
- self.touch(os.path.join(b'foo', b'bar'))
+ self.touch(os.path.join(b"foo", b"bar"))
# Temporarily change directory so relative paths work.
cur_dir = os.getcwd()
try:
os.chdir(syspath(self.temp_dir))
- self.assertTrue(is_path_query('foo/'))
- self.assertTrue(is_path_query('foo/bar'))
- self.assertTrue(is_path_query('foo/bar:tagada'))
- self.assertFalse(is_path_query('bar'))
+ self.assertTrue(is_path_query("foo/"))
+ self.assertTrue(is_path_query("foo/bar"))
+ self.assertTrue(is_path_query("foo/bar:tagada"))
+ self.assertFalse(is_path_query("bar"))
finally:
os.chdir(cur_dir)
class IntQueryTest(unittest.TestCase, TestHelper):
-
def setUp(self):
- self.lib = Library(':memory:')
+ self.lib = Library(":memory:")
def tearDown(self):
Item._types = {}
def test_exact_value_match(self):
item = self.add_item(bpm=120)
- matched = self.lib.items('bpm:120').get()
+ matched = self.lib.items("bpm:120").get()
self.assertEqual(item.id, matched.id)
def test_range_match(self):
item = self.add_item(bpm=120)
self.add_item(bpm=130)
- matched = self.lib.items('bpm:110..125')
+ matched = self.lib.items("bpm:110..125")
self.assertEqual(1, len(matched))
self.assertEqual(item.id, matched.get().id)
def test_flex_range_match(self):
- Item._types = {'myint': types.Integer()}
+ Item._types = {"myint": types.Integer()}
item = self.add_item(myint=2)
- matched = self.lib.items('myint:2').get()
+ matched = self.lib.items("myint:2").get()
self.assertEqual(item.id, matched.id)
def test_flex_dont_match_missing(self):
- Item._types = {'myint': types.Integer()}
+ Item._types = {"myint": types.Integer()}
self.add_item()
- matched = self.lib.items('myint:2').get()
+ matched = self.lib.items("myint:2").get()
self.assertIsNone(matched)
def test_no_substring_match(self):
self.add_item(bpm=120)
- matched = self.lib.items('bpm:12').get()
+ matched = self.lib.items("bpm:12").get()
self.assertIsNone(matched)
class BoolQueryTest(unittest.TestCase, TestHelper):
-
def setUp(self):
- self.lib = Library(':memory:')
- Item._types = {'flexbool': types.Boolean()}
+ self.lib = Library(":memory:")
+ Item._types = {"flexbool": types.Boolean()}
def tearDown(self):
Item._types = {}
@@ -736,35 +771,35 @@ def tearDown(self):
def test_parse_true(self):
item_true = self.add_item(comp=True)
item_false = self.add_item(comp=False)
- matched = self.lib.items('comp:true')
+ matched = self.lib.items("comp:true")
self.assertInResult(item_true, matched)
self.assertNotInResult(item_false, matched)
def test_flex_parse_true(self):
item_true = self.add_item(flexbool=True)
item_false = self.add_item(flexbool=False)
- matched = self.lib.items('flexbool:true')
+ matched = self.lib.items("flexbool:true")
self.assertInResult(item_true, matched)
self.assertNotInResult(item_false, matched)
def test_flex_parse_false(self):
item_true = self.add_item(flexbool=True)
item_false = self.add_item(flexbool=False)
- matched = self.lib.items('flexbool:false')
+ matched = self.lib.items("flexbool:false")
self.assertInResult(item_false, matched)
self.assertNotInResult(item_true, matched)
def test_flex_parse_1(self):
item_true = self.add_item(flexbool=True)
item_false = self.add_item(flexbool=False)
- matched = self.lib.items('flexbool:1')
+ matched = self.lib.items("flexbool:1")
self.assertInResult(item_true, matched)
self.assertNotInResult(item_false, matched)
def test_flex_parse_0(self):
item_true = self.add_item(flexbool=True)
item_false = self.add_item(flexbool=False)
- matched = self.lib.items('flexbool:0')
+ matched = self.lib.items("flexbool:0")
self.assertInResult(item_false, matched)
self.assertNotInResult(item_true, matched)
@@ -772,65 +807,64 @@ def test_flex_parse_any_string(self):
# TODO this should be the other way around
item_true = self.add_item(flexbool=True)
item_false = self.add_item(flexbool=False)
- matched = self.lib.items('flexbool:something')
+ matched = self.lib.items("flexbool:something")
self.assertInResult(item_false, matched)
self.assertNotInResult(item_true, matched)
class DefaultSearchFieldsTest(DummyDataTestCase):
def test_albums_matches_album(self):
- albums = list(self.lib.albums('baz'))
+ albums = list(self.lib.albums("baz"))
self.assertEqual(len(albums), 1)
def test_albums_matches_albumartist(self):
- albums = list(self.lib.albums(['album artist']))
+ albums = list(self.lib.albums(["album artist"]))
self.assertEqual(len(albums), 1)
def test_items_matches_title(self):
- items = self.lib.items('beets')
- self.assert_items_matched(items, ['beets 4 eva'])
+ items = self.lib.items("beets")
+ self.assert_items_matched(items, ["beets 4 eva"])
def test_items_does_not_match_year(self):
- items = self.lib.items('2001')
+ items = self.lib.items("2001")
self.assert_items_matched(items, [])
class NoneQueryTest(unittest.TestCase, TestHelper):
-
def setUp(self):
- self.lib = Library(':memory:')
+ self.lib = Library(":memory:")
def test_match_singletons(self):
singleton = self.add_item()
album_item = self.add_album().items().get()
- matched = self.lib.items(NoneQuery('album_id'))
+ matched = self.lib.items(NoneQuery("album_id"))
self.assertInResult(singleton, matched)
self.assertNotInResult(album_item, matched)
def test_match_after_set_none(self):
item = self.add_item(rg_track_gain=0)
- matched = self.lib.items(NoneQuery('rg_track_gain'))
+ matched = self.lib.items(NoneQuery("rg_track_gain"))
self.assertNotInResult(item, matched)
- item['rg_track_gain'] = None
+ item["rg_track_gain"] = None
item.store()
- matched = self.lib.items(NoneQuery('rg_track_gain'))
+ matched = self.lib.items(NoneQuery("rg_track_gain"))
self.assertInResult(item, matched)
def test_match_slow(self):
item = self.add_item()
- matched = self.lib.items(NoneQuery('rg_track_peak', fast=False))
+ matched = self.lib.items(NoneQuery("rg_track_peak", fast=False))
self.assertInResult(item, matched)
def test_match_slow_after_set_none(self):
item = self.add_item(rg_track_gain=0)
- matched = self.lib.items(NoneQuery('rg_track_gain', fast=False))
+ matched = self.lib.items(NoneQuery("rg_track_gain", fast=False))
self.assertNotInResult(item, matched)
- item['rg_track_gain'] = None
+ item["rg_track_gain"] = None
item.store()
- matched = self.lib.items(NoneQuery('rg_track_gain', fast=False))
+ matched = self.lib.items(NoneQuery("rg_track_gain", fast=False))
self.assertInResult(item, matched)
@@ -845,57 +879,57 @@ def setUp(self):
self.item = _common.item()
def test_regex_match_positive(self):
- q = dbcore.query.RegexpQuery('album', '^the album$')
+ q = dbcore.query.RegexpQuery("album", "^the album$")
self.assertTrue(q.match(self.item))
self.assertFalse(dbcore.query.NotQuery(q).match(self.item))
def test_regex_match_negative(self):
- q = dbcore.query.RegexpQuery('album', '^album$')
+ q = dbcore.query.RegexpQuery("album", "^album$")
self.assertFalse(q.match(self.item))
self.assertTrue(dbcore.query.NotQuery(q).match(self.item))
def test_regex_match_non_string_value(self):
- q = dbcore.query.RegexpQuery('disc', '^6$')
+ q = dbcore.query.RegexpQuery("disc", "^6$")
self.assertTrue(q.match(self.item))
self.assertFalse(dbcore.query.NotQuery(q).match(self.item))
def test_substring_match_positive(self):
- q = dbcore.query.SubstringQuery('album', 'album')
+ q = dbcore.query.SubstringQuery("album", "album")
self.assertTrue(q.match(self.item))
self.assertFalse(dbcore.query.NotQuery(q).match(self.item))
def test_substring_match_negative(self):
- q = dbcore.query.SubstringQuery('album', 'ablum')
+ q = dbcore.query.SubstringQuery("album", "ablum")
self.assertFalse(q.match(self.item))
self.assertTrue(dbcore.query.NotQuery(q).match(self.item))
def test_substring_match_non_string_value(self):
- q = dbcore.query.SubstringQuery('disc', '6')
+ q = dbcore.query.SubstringQuery("disc", "6")
self.assertTrue(q.match(self.item))
self.assertFalse(dbcore.query.NotQuery(q).match(self.item))
def test_year_match_positive(self):
- q = dbcore.query.NumericQuery('year', '1')
+ q = dbcore.query.NumericQuery("year", "1")
self.assertTrue(q.match(self.item))
self.assertFalse(dbcore.query.NotQuery(q).match(self.item))
def test_year_match_negative(self):
- q = dbcore.query.NumericQuery('year', '10')
+ q = dbcore.query.NumericQuery("year", "10")
self.assertFalse(q.match(self.item))
self.assertTrue(dbcore.query.NotQuery(q).match(self.item))
def test_bitrate_range_positive(self):
- q = dbcore.query.NumericQuery('bitrate', '100000..200000')
+ q = dbcore.query.NumericQuery("bitrate", "100000..200000")
self.assertTrue(q.match(self.item))
self.assertFalse(dbcore.query.NotQuery(q).match(self.item))
def test_bitrate_range_negative(self):
- q = dbcore.query.NumericQuery('bitrate', '200000..300000')
+ q = dbcore.query.NumericQuery("bitrate", "200000..300000")
self.assertFalse(q.match(self.item))
self.assertTrue(dbcore.query.NotQuery(q).match(self.item))
def test_open_range(self):
- q = dbcore.query.NumericQuery('bitrate', '100000..')
+ q = dbcore.query.NumericQuery("bitrate", "100000..")
dbcore.query.NotQuery(q)
@@ -927,39 +961,45 @@ def assertNegationProperties(self, q): # noqa
# round trip
not_not_q = dbcore.query.NotQuery(not_q)
- self.assertEqual({i.title for i in self.lib.items(q)},
- {i.title for i in self.lib.items(not_not_q)})
+ self.assertEqual(
+ {i.title for i in self.lib.items(q)},
+ {i.title for i in self.lib.items(not_not_q)},
+ )
def test_type_and(self):
# not(a and b) <-> not(a) or not(b)
- q = dbcore.query.AndQuery([
- dbcore.query.BooleanQuery('comp', True),
- dbcore.query.NumericQuery('year', '2002')],
+ q = dbcore.query.AndQuery(
+ [
+ dbcore.query.BooleanQuery("comp", True),
+ dbcore.query.NumericQuery("year", "2002"),
+ ],
)
not_results = self.lib.items(dbcore.query.NotQuery(q))
- self.assert_items_matched(not_results, ['foo bar', 'beets 4 eva'])
+ self.assert_items_matched(not_results, ["foo bar", "beets 4 eva"])
self.assertNegationProperties(q)
def test_type_anyfield(self):
- q = dbcore.query.AnyFieldQuery('foo', ['title', 'artist', 'album'],
- dbcore.query.SubstringQuery)
+ q = dbcore.query.AnyFieldQuery(
+ "foo", ["title", "artist", "album"], dbcore.query.SubstringQuery
+ )
not_results = self.lib.items(dbcore.query.NotQuery(q))
- self.assert_items_matched(not_results, ['baz qux'])
+ self.assert_items_matched(not_results, ["baz qux"])
self.assertNegationProperties(q)
def test_type_boolean(self):
- q = dbcore.query.BooleanQuery('comp', True)
+ q = dbcore.query.BooleanQuery("comp", True)
not_results = self.lib.items(dbcore.query.NotQuery(q))
- self.assert_items_matched(not_results, ['beets 4 eva'])
+ self.assert_items_matched(not_results, ["beets 4 eva"])
self.assertNegationProperties(q)
def test_type_date(self):
- q = dbcore.query.DateQuery('added', '2000-01-01')
+ q = dbcore.query.DateQuery("added", "2000-01-01")
not_results = self.lib.items(dbcore.query.NotQuery(q))
# query date is in the past, thus the 'not' results should contain all
# items
- self.assert_items_matched(not_results, ['foo bar', 'baz qux',
- 'beets 4 eva'])
+ self.assert_items_matched(
+ not_results, ["foo bar", "baz qux", "beets 4 eva"]
+ )
self.assertNegationProperties(q)
def test_type_false(self):
@@ -969,41 +1009,45 @@ def test_type_false(self):
self.assertNegationProperties(q)
def test_type_match(self):
- q = dbcore.query.MatchQuery('year', '2003')
+ q = dbcore.query.MatchQuery("year", "2003")
not_results = self.lib.items(dbcore.query.NotQuery(q))
- self.assert_items_matched(not_results, ['foo bar', 'baz qux'])
+ self.assert_items_matched(not_results, ["foo bar", "baz qux"])
self.assertNegationProperties(q)
def test_type_none(self):
- q = dbcore.query.NoneQuery('rg_track_gain')
+ q = dbcore.query.NoneQuery("rg_track_gain")
not_results = self.lib.items(dbcore.query.NotQuery(q))
self.assert_items_matched(not_results, [])
self.assertNegationProperties(q)
def test_type_numeric(self):
- q = dbcore.query.NumericQuery('year', '2001..2002')
+ q = dbcore.query.NumericQuery("year", "2001..2002")
not_results = self.lib.items(dbcore.query.NotQuery(q))
- self.assert_items_matched(not_results, ['beets 4 eva'])
+ self.assert_items_matched(not_results, ["beets 4 eva"])
self.assertNegationProperties(q)
def test_type_or(self):
# not(a or b) <-> not(a) and not(b)
- q = dbcore.query.OrQuery([dbcore.query.BooleanQuery('comp', True),
- dbcore.query.NumericQuery('year', '2002')])
+ q = dbcore.query.OrQuery(
+ [
+ dbcore.query.BooleanQuery("comp", True),
+ dbcore.query.NumericQuery("year", "2002"),
+ ]
+ )
not_results = self.lib.items(dbcore.query.NotQuery(q))
- self.assert_items_matched(not_results, ['beets 4 eva'])
+ self.assert_items_matched(not_results, ["beets 4 eva"])
self.assertNegationProperties(q)
def test_type_regexp(self):
- q = dbcore.query.RegexpQuery('artist', '^t')
+ q = dbcore.query.RegexpQuery("artist", "^t")
not_results = self.lib.items(dbcore.query.NotQuery(q))
- self.assert_items_matched(not_results, ['foo bar'])
+ self.assert_items_matched(not_results, ["foo bar"])
self.assertNegationProperties(q)
def test_type_substring(self):
- q = dbcore.query.SubstringQuery('album', 'ba')
+ q = dbcore.query.SubstringQuery("album", "ba")
not_results = self.lib.items(dbcore.query.NotQuery(q))
- self.assert_items_matched(not_results, ['beets 4 eva'])
+ self.assert_items_matched(not_results, ["beets 4 eva"])
self.assertNegationProperties(q)
def test_type_true(self):
@@ -1014,41 +1058,41 @@ def test_type_true(self):
def test_get_prefixes_keyed(self):
"""Test both negation prefixes on a keyed query."""
- q0 = '-title:qux'
- q1 = '^title:qux'
+ q0 = "-title:qux"
+ q1 = "^title:qux"
results0 = self.lib.items(q0)
results1 = self.lib.items(q1)
- self.assert_items_matched(results0, ['foo bar', 'beets 4 eva'])
- self.assert_items_matched(results1, ['foo bar', 'beets 4 eva'])
+ self.assert_items_matched(results0, ["foo bar", "beets 4 eva"])
+ self.assert_items_matched(results1, ["foo bar", "beets 4 eva"])
def test_get_prefixes_unkeyed(self):
"""Test both negation prefixes on an unkeyed query."""
- q0 = '-qux'
- q1 = '^qux'
+ q0 = "-qux"
+ q1 = "^qux"
results0 = self.lib.items(q0)
results1 = self.lib.items(q1)
- self.assert_items_matched(results0, ['foo bar', 'beets 4 eva'])
- self.assert_items_matched(results1, ['foo bar', 'beets 4 eva'])
+ self.assert_items_matched(results0, ["foo bar", "beets 4 eva"])
+ self.assert_items_matched(results1, ["foo bar", "beets 4 eva"])
def test_get_one_keyed_regexp(self):
- q = '-artist::t.+r'
+ q = "-artist::t.+r"
results = self.lib.items(q)
- self.assert_items_matched(results, ['foo bar', 'baz qux'])
+ self.assert_items_matched(results, ["foo bar", "baz qux"])
def test_get_one_unkeyed_regexp(self):
- q = '-:x$'
+ q = "-:x$"
results = self.lib.items(q)
- self.assert_items_matched(results, ['foo bar', 'beets 4 eva'])
+ self.assert_items_matched(results, ["foo bar", "beets 4 eva"])
def test_get_multiple_terms(self):
- q = 'baz -bar'
+ q = "baz -bar"
results = self.lib.items(q)
- self.assert_items_matched(results, ['baz qux'])
+ self.assert_items_matched(results, ["baz qux"])
def test_get_mixed_terms(self):
- q = 'baz -title:bar'
+ q = "baz -title:bar"
results = self.lib.items(q)
- self.assert_items_matched(results, ['baz qux'])
+ self.assert_items_matched(results, ["baz qux"])
def test_fast_vs_slow(self):
"""Test that the results are the same regardless of the `fast` flag
@@ -1058,21 +1102,25 @@ def test_fast_vs_slow(self):
AttributeError: type object 'NoneQuery' has no attribute 'field'
at NoneQuery.match() (due to being @classmethod, and no self?)
"""
- classes = [(dbcore.query.DateQuery, ['added', '2001-01-01']),
- (dbcore.query.MatchQuery, ['artist', 'one']),
- # (dbcore.query.NoneQuery, ['rg_track_gain']),
- (dbcore.query.NumericQuery, ['year', '2002']),
- (dbcore.query.StringFieldQuery, ['year', '2001']),
- (dbcore.query.RegexpQuery, ['album', '^.a']),
- (dbcore.query.SubstringQuery, ['title', 'x'])]
+ classes = [
+ (dbcore.query.DateQuery, ["added", "2001-01-01"]),
+ (dbcore.query.MatchQuery, ["artist", "one"]),
+ # (dbcore.query.NoneQuery, ['rg_track_gain']),
+ (dbcore.query.NumericQuery, ["year", "2002"]),
+ (dbcore.query.StringFieldQuery, ["year", "2001"]),
+ (dbcore.query.RegexpQuery, ["album", "^.a"]),
+ (dbcore.query.SubstringQuery, ["title", "x"]),
+ ]
for klass, args in classes:
q_fast = dbcore.query.NotQuery(klass(*(args + [True])))
q_slow = dbcore.query.NotQuery(klass(*(args + [False])))
try:
- self.assertEqual([i.title for i in self.lib.items(q_fast)],
- [i.title for i in self.lib.items(q_slow)])
+ self.assertEqual(
+ [i.title for i in self.lib.items(q_fast)],
+ [i.title for i in self.lib.items(q_slow)],
+ )
except NotImplementedError:
# ignore classes that do not provide `fast` implementation
pass
@@ -1082,5 +1130,5 @@ def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/test_sort.py b/test/test_sort.py
index 70b1bc93e0..035d5ea701 100644
--- a/test/test_sort.py
+++ b/test/test_sort.py
@@ -17,9 +17,9 @@
import unittest
from test import _common
+
import beets.library
-from beets import dbcore
-from beets import config
+from beets import config, dbcore
# A test case class providing a library with some dummy data and some
@@ -27,7 +27,7 @@
class DummyDataTestCase(_common.TestCase):
def setUp(self):
super().setUp()
- self.lib = beets.library.Library(':memory:')
+ self.lib = beets.library.Library(":memory:")
albums = [_common.album() for _ in range(3)]
albums[0].album = "Album A"
@@ -55,9 +55,9 @@ def setUp(self):
self.lib.add(album)
items = [_common.item() for _ in range(4)]
- items[0].title = 'Foo bar'
- items[0].artist = 'One'
- items[0].album = 'Baz'
+ items[0].title = "Foo bar"
+ items[0].artist = "One"
+ items[0].album = "Baz"
items[0].year = 2001
items[0].comp = True
items[0].flex1 = "Flex1-0"
@@ -66,9 +66,9 @@ def setUp(self):
items[0].artist_sort = None
items[0].path = "/path0.mp3"
items[0].track = 1
- items[1].title = 'Baz qux'
- items[1].artist = 'Two'
- items[1].album = 'Baz'
+ items[1].title = "Baz qux"
+ items[1].artist = "Two"
+ items[1].album = "Baz"
items[1].year = 2002
items[1].comp = True
items[1].flex1 = "Flex1-1"
@@ -77,9 +77,9 @@ def setUp(self):
items[1].artist_sort = None
items[1].path = "/patH1.mp3"
items[1].track = 2
- items[2].title = 'Beets 4 eva'
- items[2].artist = 'Three'
- items[2].album = 'Foo'
+ items[2].title = "Beets 4 eva"
+ items[2].artist = "Three"
+ items[2].album = "Foo"
items[2].year = 2003
items[2].comp = False
items[2].flex1 = "Flex1-2"
@@ -88,9 +88,9 @@ def setUp(self):
items[2].artist_sort = None
items[2].path = "/paTH2.mp3"
items[2].track = 3
- items[3].title = 'Beets 4 eva'
- items[3].artist = 'Three'
- items[3].album = 'Foo2'
+ items[3].title = "Beets 4 eva"
+ items[3].artist = "Three"
+ items[3].album = "Foo2"
items[3].year = 2004
items[3].comp = False
items[3].flex1 = "Flex1-2"
@@ -105,100 +105,100 @@ def setUp(self):
class SortFixedFieldTest(DummyDataTestCase):
def test_sort_asc(self):
- q = ''
+ q = ""
sort = dbcore.query.FixedFieldSort("year", True)
results = self.lib.items(q, sort)
- self.assertLessEqual(results[0]['year'], results[1]['year'])
- self.assertEqual(results[0]['year'], 2001)
+ self.assertLessEqual(results[0]["year"], results[1]["year"])
+ self.assertEqual(results[0]["year"], 2001)
# same thing with query string
- q = 'year+'
+ q = "year+"
results2 = self.lib.items(q)
for r1, r2 in zip(results, results2):
self.assertEqual(r1.id, r2.id)
def test_sort_desc(self):
- q = ''
+ q = ""
sort = dbcore.query.FixedFieldSort("year", False)
results = self.lib.items(q, sort)
- self.assertGreaterEqual(results[0]['year'], results[1]['year'])
- self.assertEqual(results[0]['year'], 2004)
+ self.assertGreaterEqual(results[0]["year"], results[1]["year"])
+ self.assertEqual(results[0]["year"], 2004)
# same thing with query string
- q = 'year-'
+ q = "year-"
results2 = self.lib.items(q)
for r1, r2 in zip(results, results2):
self.assertEqual(r1.id, r2.id)
def test_sort_two_field_asc(self):
- q = ''
+ q = ""
s1 = dbcore.query.FixedFieldSort("album", True)
s2 = dbcore.query.FixedFieldSort("year", True)
sort = dbcore.query.MultipleSort()
sort.add_sort(s1)
sort.add_sort(s2)
results = self.lib.items(q, sort)
- self.assertLessEqual(results[0]['album'], results[1]['album'])
- self.assertLessEqual(results[1]['album'], results[2]['album'])
- self.assertEqual(results[0]['album'], 'Baz')
- self.assertEqual(results[1]['album'], 'Baz')
- self.assertLessEqual(results[0]['year'], results[1]['year'])
+ self.assertLessEqual(results[0]["album"], results[1]["album"])
+ self.assertLessEqual(results[1]["album"], results[2]["album"])
+ self.assertEqual(results[0]["album"], "Baz")
+ self.assertEqual(results[1]["album"], "Baz")
+ self.assertLessEqual(results[0]["year"], results[1]["year"])
# same thing with query string
- q = 'album+ year+'
+ q = "album+ year+"
results2 = self.lib.items(q)
for r1, r2 in zip(results, results2):
self.assertEqual(r1.id, r2.id)
def test_sort_path_field(self):
- q = ''
- sort = dbcore.query.FixedFieldSort('path', True)
+ q = ""
+ sort = dbcore.query.FixedFieldSort("path", True)
results = self.lib.items(q, sort)
- self.assertEqual(results[0]['path'], b'/path0.mp3')
- self.assertEqual(results[1]['path'], b'/patH1.mp3')
- self.assertEqual(results[2]['path'], b'/paTH2.mp3')
- self.assertEqual(results[3]['path'], b'/PATH3.mp3')
+ self.assertEqual(results[0]["path"], b"/path0.mp3")
+ self.assertEqual(results[1]["path"], b"/patH1.mp3")
+ self.assertEqual(results[2]["path"], b"/paTH2.mp3")
+ self.assertEqual(results[3]["path"], b"/PATH3.mp3")
class SortFlexFieldTest(DummyDataTestCase):
def test_sort_asc(self):
- q = ''
+ q = ""
sort = dbcore.query.SlowFieldSort("flex1", True)
results = self.lib.items(q, sort)
- self.assertLessEqual(results[0]['flex1'], results[1]['flex1'])
- self.assertEqual(results[0]['flex1'], 'Flex1-0')
+ self.assertLessEqual(results[0]["flex1"], results[1]["flex1"])
+ self.assertEqual(results[0]["flex1"], "Flex1-0")
# same thing with query string
- q = 'flex1+'
+ q = "flex1+"
results2 = self.lib.items(q)
for r1, r2 in zip(results, results2):
self.assertEqual(r1.id, r2.id)
def test_sort_desc(self):
- q = ''
+ q = ""
sort = dbcore.query.SlowFieldSort("flex1", False)
results = self.lib.items(q, sort)
- self.assertGreaterEqual(results[0]['flex1'], results[1]['flex1'])
- self.assertGreaterEqual(results[1]['flex1'], results[2]['flex1'])
- self.assertGreaterEqual(results[2]['flex1'], results[3]['flex1'])
- self.assertEqual(results[0]['flex1'], 'Flex1-2')
+ self.assertGreaterEqual(results[0]["flex1"], results[1]["flex1"])
+ self.assertGreaterEqual(results[1]["flex1"], results[2]["flex1"])
+ self.assertGreaterEqual(results[2]["flex1"], results[3]["flex1"])
+ self.assertEqual(results[0]["flex1"], "Flex1-2")
# same thing with query string
- q = 'flex1-'
+ q = "flex1-"
results2 = self.lib.items(q)
for r1, r2 in zip(results, results2):
self.assertEqual(r1.id, r2.id)
def test_sort_two_field(self):
- q = ''
+ q = ""
s1 = dbcore.query.SlowFieldSort("flex2", False)
s2 = dbcore.query.SlowFieldSort("flex1", True)
sort = dbcore.query.MultipleSort()
sort.add_sort(s1)
sort.add_sort(s2)
results = self.lib.items(q, sort)
- self.assertGreaterEqual(results[0]['flex2'], results[1]['flex2'])
- self.assertGreaterEqual(results[1]['flex2'], results[2]['flex2'])
- self.assertEqual(results[0]['flex2'], 'Flex2-A')
- self.assertEqual(results[1]['flex2'], 'Flex2-A')
- self.assertLessEqual(results[0]['flex1'], results[1]['flex1'])
+ self.assertGreaterEqual(results[0]["flex2"], results[1]["flex2"])
+ self.assertGreaterEqual(results[1]["flex2"], results[2]["flex2"])
+ self.assertEqual(results[0]["flex2"], "Flex2-A")
+ self.assertEqual(results[1]["flex2"], "Flex2-A")
+ self.assertLessEqual(results[0]["flex1"], results[1]["flex1"])
# same thing with query string
- q = 'flex2- flex1+'
+ q = "flex2- flex1+"
results2 = self.lib.items(q)
for r1, r2 in zip(results, results2):
self.assertEqual(r1.id, r2.id)
@@ -206,44 +206,44 @@ def test_sort_two_field(self):
class SortAlbumFixedFieldTest(DummyDataTestCase):
def test_sort_asc(self):
- q = ''
+ q = ""
sort = dbcore.query.FixedFieldSort("year", True)
results = self.lib.albums(q, sort)
- self.assertLessEqual(results[0]['year'], results[1]['year'])
- self.assertEqual(results[0]['year'], 2001)
+ self.assertLessEqual(results[0]["year"], results[1]["year"])
+ self.assertEqual(results[0]["year"], 2001)
# same thing with query string
- q = 'year+'
+ q = "year+"
results2 = self.lib.albums(q)
for r1, r2 in zip(results, results2):
self.assertEqual(r1.id, r2.id)
def test_sort_desc(self):
- q = ''
+ q = ""
sort = dbcore.query.FixedFieldSort("year", False)
results = self.lib.albums(q, sort)
- self.assertGreaterEqual(results[0]['year'], results[1]['year'])
- self.assertEqual(results[0]['year'], 2005)
+ self.assertGreaterEqual(results[0]["year"], results[1]["year"])
+ self.assertEqual(results[0]["year"], 2005)
# same thing with query string
- q = 'year-'
+ q = "year-"
results2 = self.lib.albums(q)
for r1, r2 in zip(results, results2):
self.assertEqual(r1.id, r2.id)
def test_sort_two_field_asc(self):
- q = ''
+ q = ""
s1 = dbcore.query.FixedFieldSort("genre", True)
s2 = dbcore.query.FixedFieldSort("album", True)
sort = dbcore.query.MultipleSort()
sort.add_sort(s1)
sort.add_sort(s2)
results = self.lib.albums(q, sort)
- self.assertLessEqual(results[0]['genre'], results[1]['genre'])
- self.assertLessEqual(results[1]['genre'], results[2]['genre'])
- self.assertEqual(results[1]['genre'], 'Rock')
- self.assertEqual(results[2]['genre'], 'Rock')
- self.assertLessEqual(results[1]['album'], results[2]['album'])
+ self.assertLessEqual(results[0]["genre"], results[1]["genre"])
+ self.assertLessEqual(results[1]["genre"], results[2]["genre"])
+ self.assertEqual(results[1]["genre"], "Rock")
+ self.assertEqual(results[2]["genre"], "Rock")
+ self.assertLessEqual(results[1]["album"], results[2]["album"])
# same thing with query string
- q = 'genre+ album+'
+ q = "genre+ album+"
results2 = self.lib.albums(q)
for r1, r2 in zip(results, results2):
self.assertEqual(r1.id, r2.id)
@@ -251,44 +251,44 @@ def test_sort_two_field_asc(self):
class SortAlbumFlexFieldTest(DummyDataTestCase):
def test_sort_asc(self):
- q = ''
+ q = ""
sort = dbcore.query.SlowFieldSort("flex1", True)
results = self.lib.albums(q, sort)
- self.assertLessEqual(results[0]['flex1'], results[1]['flex1'])
- self.assertLessEqual(results[1]['flex1'], results[2]['flex1'])
+ self.assertLessEqual(results[0]["flex1"], results[1]["flex1"])
+ self.assertLessEqual(results[1]["flex1"], results[2]["flex1"])
# same thing with query string
- q = 'flex1+'
+ q = "flex1+"
results2 = self.lib.albums(q)
for r1, r2 in zip(results, results2):
self.assertEqual(r1.id, r2.id)
def test_sort_desc(self):
- q = ''
+ q = ""
sort = dbcore.query.SlowFieldSort("flex1", False)
results = self.lib.albums(q, sort)
- self.assertGreaterEqual(results[0]['flex1'], results[1]['flex1'])
- self.assertGreaterEqual(results[1]['flex1'], results[2]['flex1'])
+ self.assertGreaterEqual(results[0]["flex1"], results[1]["flex1"])
+ self.assertGreaterEqual(results[1]["flex1"], results[2]["flex1"])
# same thing with query string
- q = 'flex1-'
+ q = "flex1-"
results2 = self.lib.albums(q)
for r1, r2 in zip(results, results2):
self.assertEqual(r1.id, r2.id)
def test_sort_two_field_asc(self):
- q = ''
+ q = ""
s1 = dbcore.query.SlowFieldSort("flex2", True)
s2 = dbcore.query.SlowFieldSort("flex1", True)
sort = dbcore.query.MultipleSort()
sort.add_sort(s1)
sort.add_sort(s2)
results = self.lib.albums(q, sort)
- self.assertLessEqual(results[0]['flex2'], results[1]['flex2'])
- self.assertLessEqual(results[1]['flex2'], results[2]['flex2'])
- self.assertEqual(results[0]['flex2'], 'Flex2-A')
- self.assertEqual(results[1]['flex2'], 'Flex2-A')
- self.assertLessEqual(results[0]['flex1'], results[1]['flex1'])
+ self.assertLessEqual(results[0]["flex2"], results[1]["flex2"])
+ self.assertLessEqual(results[1]["flex2"], results[2]["flex2"])
+ self.assertEqual(results[0]["flex2"], "Flex2-A")
+ self.assertEqual(results[1]["flex2"], "Flex2-A")
+ self.assertLessEqual(results[0]["flex1"], results[1]["flex1"])
# same thing with query string
- q = 'flex2+ flex1+'
+ q = "flex2+ flex1+"
results2 = self.lib.albums(q)
for r1, r2 in zip(results, results2):
self.assertEqual(r1.id, r2.id)
@@ -296,25 +296,25 @@ def test_sort_two_field_asc(self):
class SortAlbumComputedFieldTest(DummyDataTestCase):
def test_sort_asc(self):
- q = ''
+ q = ""
sort = dbcore.query.SlowFieldSort("path", True)
results = self.lib.albums(q, sort)
- self.assertLessEqual(results[0]['path'], results[1]['path'])
- self.assertLessEqual(results[1]['path'], results[2]['path'])
+ self.assertLessEqual(results[0]["path"], results[1]["path"])
+ self.assertLessEqual(results[1]["path"], results[2]["path"])
# same thing with query string
- q = 'path+'
+ q = "path+"
results2 = self.lib.albums(q)
for r1, r2 in zip(results, results2):
self.assertEqual(r1.id, r2.id)
def test_sort_desc(self):
- q = ''
+ q = ""
sort = dbcore.query.SlowFieldSort("path", False)
results = self.lib.albums(q, sort)
- self.assertGreaterEqual(results[0]['path'], results[1]['path'])
- self.assertGreaterEqual(results[1]['path'], results[2]['path'])
+ self.assertGreaterEqual(results[0]["path"], results[1]["path"])
+ self.assertGreaterEqual(results[1]["path"], results[2]["path"])
# same thing with query string
- q = 'path-'
+ q = "path-"
results2 = self.lib.albums(q)
for r1, r2 in zip(results, results2):
self.assertEqual(r1.id, r2.id)
@@ -322,32 +322,32 @@ def test_sort_desc(self):
class SortCombinedFieldTest(DummyDataTestCase):
def test_computed_first(self):
- q = ''
+ q = ""
s1 = dbcore.query.SlowFieldSort("path", True)
s2 = dbcore.query.FixedFieldSort("year", True)
sort = dbcore.query.MultipleSort()
sort.add_sort(s1)
sort.add_sort(s2)
results = self.lib.albums(q, sort)
- self.assertLessEqual(results[0]['path'], results[1]['path'])
- self.assertLessEqual(results[1]['path'], results[2]['path'])
- q = 'path+ year+'
+ self.assertLessEqual(results[0]["path"], results[1]["path"])
+ self.assertLessEqual(results[1]["path"], results[2]["path"])
+ q = "path+ year+"
results2 = self.lib.albums(q)
for r1, r2 in zip(results, results2):
self.assertEqual(r1.id, r2.id)
def test_computed_second(self):
- q = ''
+ q = ""
s1 = dbcore.query.FixedFieldSort("year", True)
s2 = dbcore.query.SlowFieldSort("path", True)
sort = dbcore.query.MultipleSort()
sort.add_sort(s1)
sort.add_sort(s2)
results = self.lib.albums(q, sort)
- self.assertLessEqual(results[0]['year'], results[1]['year'])
- self.assertLessEqual(results[1]['year'], results[2]['year'])
- self.assertLessEqual(results[0]['path'], results[1]['path'])
- q = 'year+ path+'
+ self.assertLessEqual(results[0]["year"], results[1]["year"])
+ self.assertLessEqual(results[1]["year"], results[2]["year"])
+ self.assertLessEqual(results[0]["path"], results[1]["path"])
+ q = "year+ path+"
results2 = self.lib.albums(q)
for r1, r2 in zip(results, results2):
self.assertEqual(r1.id, r2.id)
@@ -359,7 +359,7 @@ def test_default_sort_item(self):
self.assertLess(results[0].artist, results[1].artist)
def test_config_opposite_sort_item(self):
- config['sort_item'] = 'artist-'
+ config["sort_item"] = "artist-"
results = list(self.lib.items())
self.assertGreater(results[0].artist, results[1].artist)
@@ -368,7 +368,7 @@ def test_default_sort_album(self):
self.assertLess(results[0].albumartist, results[1].albumartist)
def test_config_opposite_sort_album(self):
- config['sort_album'] = 'albumartist-'
+ config["sort_album"] = "albumartist-"
results = list(self.lib.albums())
self.assertGreater(results[0].albumartist, results[1].albumartist)
@@ -392,9 +392,9 @@ def setUp(self):
self.lib.add(album)
item = _common.item()
- item.title = 'another'
- item.artist = 'lowercase'
- item.album = 'album'
+ item.title = "another"
+ item.artist = "lowercase"
+ item.album = "album"
item.year = 2001
item.comp = True
item.flex1 = "flex1"
@@ -413,50 +413,50 @@ def tearDown(self):
super().tearDown()
def test_smart_artist_case_insensitive(self):
- config['sort_case_insensitive'] = True
- q = 'artist+'
+ config["sort_case_insensitive"] = True
+ q = "artist+"
results = list(self.lib.items(q))
- self.assertEqual(results[0].artist, 'lowercase')
- self.assertEqual(results[1].artist, 'One')
+ self.assertEqual(results[0].artist, "lowercase")
+ self.assertEqual(results[1].artist, "One")
def test_smart_artist_case_sensitive(self):
- config['sort_case_insensitive'] = False
- q = 'artist+'
+ config["sort_case_insensitive"] = False
+ q = "artist+"
results = list(self.lib.items(q))
- self.assertEqual(results[0].artist, 'One')
- self.assertEqual(results[-1].artist, 'lowercase')
+ self.assertEqual(results[0].artist, "One")
+ self.assertEqual(results[-1].artist, "lowercase")
def test_fixed_field_case_insensitive(self):
- config['sort_case_insensitive'] = True
- q = 'album+'
+ config["sort_case_insensitive"] = True
+ q = "album+"
results = list(self.lib.albums(q))
- self.assertEqual(results[0].album, 'album')
- self.assertEqual(results[1].album, 'Album A')
+ self.assertEqual(results[0].album, "album")
+ self.assertEqual(results[1].album, "Album A")
def test_fixed_field_case_sensitive(self):
- config['sort_case_insensitive'] = False
- q = 'album+'
+ config["sort_case_insensitive"] = False
+ q = "album+"
results = list(self.lib.albums(q))
- self.assertEqual(results[0].album, 'Album A')
- self.assertEqual(results[-1].album, 'album')
+ self.assertEqual(results[0].album, "Album A")
+ self.assertEqual(results[-1].album, "album")
def test_flex_field_case_insensitive(self):
- config['sort_case_insensitive'] = True
- q = 'flex1+'
+ config["sort_case_insensitive"] = True
+ q = "flex1+"
results = list(self.lib.items(q))
- self.assertEqual(results[0].flex1, 'flex1')
- self.assertEqual(results[1].flex1, 'Flex1-0')
+ self.assertEqual(results[0].flex1, "flex1")
+ self.assertEqual(results[1].flex1, "Flex1-0")
def test_flex_field_case_sensitive(self):
- config['sort_case_insensitive'] = False
- q = 'flex1+'
+ config["sort_case_insensitive"] = False
+ q = "flex1+"
results = list(self.lib.items(q))
- self.assertEqual(results[0].flex1, 'Flex1-0')
- self.assertEqual(results[-1].flex1, 'flex1')
+ self.assertEqual(results[0].flex1, "Flex1-0")
+ self.assertEqual(results[-1].flex1, "flex1")
def test_case_sensitive_only_affects_text(self):
- config['sort_case_insensitive'] = True
- q = 'track+'
+ config["sort_case_insensitive"] = True
+ q = "track+"
results = list(self.lib.items(q))
# If the numerical values were sorted as strings,
# then ['1', '10', '2'] would be valid.
@@ -470,10 +470,9 @@ class NonExistingFieldTest(DummyDataTestCase):
"""Test sorting by non-existing fields"""
def test_non_existing_fields_not_fail(self):
- qs = ['foo+', 'foo-', '--', '-+', '+-',
- '++', '-foo-', '-foo+', '---']
+ qs = ["foo+", "foo-", "--", "-+", "+-", "++", "-foo-", "-foo+", "---"]
- q0 = 'foo+'
+ q0 = "foo+"
results0 = list(self.lib.items(q0))
for q1 in qs:
results1 = list(self.lib.items(q1))
@@ -481,16 +480,16 @@ def test_non_existing_fields_not_fail(self):
self.assertEqual(r1.id, r2.id)
def test_combined_non_existing_field_asc(self):
- all_results = list(self.lib.items('id+'))
- q = 'foo+ id+'
+ all_results = list(self.lib.items("id+"))
+ q = "foo+ id+"
results = list(self.lib.items(q))
self.assertEqual(len(all_results), len(results))
for r1, r2 in zip(all_results, results):
self.assertEqual(r1.id, r2.id)
def test_combined_non_existing_field_desc(self):
- all_results = list(self.lib.items('id+'))
- q = 'foo- id+'
+ all_results = list(self.lib.items("id+"))
+ q = "foo- id+"
results = list(self.lib.items(q))
self.assertEqual(len(all_results), len(results))
for r1, r2 in zip(all_results, results):
@@ -499,21 +498,25 @@ def test_combined_non_existing_field_desc(self):
def test_field_present_in_some_items(self):
"""Test ordering by a field not present on all items."""
# append 'foo' to two to items (1,2)
- items = self.lib.items('id+')
+ items = self.lib.items("id+")
ids = [i.id for i in items]
- items[1].foo = 'bar1'
- items[2].foo = 'bar2'
+ items[1].foo = "bar1"
+ items[2].foo = "bar2"
items[1].store()
items[2].store()
- results_asc = list(self.lib.items('foo+ id+'))
- self.assertEqual([i.id for i in results_asc],
- # items without field first
- [ids[0], ids[3], ids[1], ids[2]])
- results_desc = list(self.lib.items('foo- id+'))
- self.assertEqual([i.id for i in results_desc],
- # items without field last
- [ids[2], ids[1], ids[0], ids[3]])
+ results_asc = list(self.lib.items("foo+ id+"))
+ self.assertEqual(
+ [i.id for i in results_asc],
+ # items without field first
+ [ids[0], ids[3], ids[1], ids[2]],
+ )
+ results_desc = list(self.lib.items("foo- id+"))
+ self.assertEqual(
+ [i.id for i in results_desc],
+ # items without field last
+ [ids[2], ids[1], ids[0], ids[3]],
+ )
def test_negation_interaction(self):
"""Test the handling of negation and sorting together.
@@ -521,18 +524,18 @@ def test_negation_interaction(self):
If a string ends with a sorting suffix, it takes precedence over the
NotQuery parsing.
"""
- query, sort = beets.library.parse_query_string('-bar+',
- beets.library.Item)
+ query, sort = beets.library.parse_query_string(
+ "-bar+", beets.library.Item
+ )
self.assertEqual(len(query.subqueries), 1)
- self.assertTrue(isinstance(query.subqueries[0],
- dbcore.query.TrueQuery))
+ self.assertTrue(isinstance(query.subqueries[0], dbcore.query.TrueQuery))
self.assertTrue(isinstance(sort, dbcore.query.SlowFieldSort))
- self.assertEqual(sort.field, '-bar')
+ self.assertEqual(sort.field, "-bar")
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/test_template.py b/test/test_template.py
index adc9b60d10..24a5351dfd 100644
--- a/test/test_template.py
+++ b/test/test_template.py
@@ -16,6 +16,7 @@
"""
import unittest
+
from beets.util import functemplate
@@ -30,13 +31,13 @@ def _normexpr(expr):
textbuf.append(part)
else:
if textbuf:
- text = ''.join(textbuf)
+ text = "".join(textbuf)
if text:
yield text
textbuf = []
yield part
if textbuf:
- text = ''.join(textbuf)
+ text = "".join(textbuf)
if text:
yield text
@@ -48,141 +49,149 @@ def _normparse(text):
class ParseTest(unittest.TestCase):
def test_empty_string(self):
- self.assertEqual(list(_normparse('')), [])
+ self.assertEqual(list(_normparse("")), [])
def _assert_symbol(self, obj, ident):
- """Assert that an object is a Symbol with the given identifier.
- """
- self.assertTrue(isinstance(obj, functemplate.Symbol),
- "not a Symbol: %s" % repr(obj))
- self.assertEqual(obj.ident, ident,
- "wrong identifier: %s vs. %s" %
- (repr(obj.ident), repr(ident)))
+ """Assert that an object is a Symbol with the given identifier."""
+ self.assertTrue(
+ isinstance(obj, functemplate.Symbol), "not a Symbol: %s" % repr(obj)
+ )
+ self.assertEqual(
+ obj.ident,
+ ident,
+ "wrong identifier: %s vs. %s" % (repr(obj.ident), repr(ident)),
+ )
def _assert_call(self, obj, ident, numargs):
"""Assert that an object is a Call with the given identifier and
argument count.
"""
- self.assertTrue(isinstance(obj, functemplate.Call),
- "not a Call: %s" % repr(obj))
- self.assertEqual(obj.ident, ident,
- "wrong identifier: %s vs. %s" %
- (repr(obj.ident), repr(ident)))
- self.assertEqual(len(obj.args), numargs,
- "wrong argument count in %s: %i vs. %i" %
- (repr(obj.ident), len(obj.args), numargs))
+ self.assertTrue(
+ isinstance(obj, functemplate.Call), "not a Call: %s" % repr(obj)
+ )
+ self.assertEqual(
+ obj.ident,
+ ident,
+ "wrong identifier: %s vs. %s" % (repr(obj.ident), repr(ident)),
+ )
+ self.assertEqual(
+ len(obj.args),
+ numargs,
+ "wrong argument count in %s: %i vs. %i"
+ % (repr(obj.ident), len(obj.args), numargs),
+ )
def test_plain_text(self):
- self.assertEqual(list(_normparse('hello world')), ['hello world'])
+ self.assertEqual(list(_normparse("hello world")), ["hello world"])
def test_escaped_character_only(self):
- self.assertEqual(list(_normparse('$$')), ['$'])
+ self.assertEqual(list(_normparse("$$")), ["$"])
def test_escaped_character_in_text(self):
- self.assertEqual(list(_normparse('a $$ b')), ['a $ b'])
+ self.assertEqual(list(_normparse("a $$ b")), ["a $ b"])
def test_escaped_character_at_start(self):
- self.assertEqual(list(_normparse('$$ hello')), ['$ hello'])
+ self.assertEqual(list(_normparse("$$ hello")), ["$ hello"])
def test_escaped_character_at_end(self):
- self.assertEqual(list(_normparse('hello $$')), ['hello $'])
+ self.assertEqual(list(_normparse("hello $$")), ["hello $"])
def test_escaped_function_delim(self):
- self.assertEqual(list(_normparse('a $% b')), ['a % b'])
+ self.assertEqual(list(_normparse("a $% b")), ["a % b"])
def test_escaped_sep(self):
- self.assertEqual(list(_normparse('a $, b')), ['a , b'])
+ self.assertEqual(list(_normparse("a $, b")), ["a , b"])
def test_escaped_close_brace(self):
- self.assertEqual(list(_normparse('a $} b')), ['a } b'])
+ self.assertEqual(list(_normparse("a $} b")), ["a } b"])
def test_bare_value_delim_kept_intact(self):
- self.assertEqual(list(_normparse('a $ b')), ['a $ b'])
+ self.assertEqual(list(_normparse("a $ b")), ["a $ b"])
def test_bare_function_delim_kept_intact(self):
- self.assertEqual(list(_normparse('a % b')), ['a % b'])
+ self.assertEqual(list(_normparse("a % b")), ["a % b"])
def test_bare_opener_kept_intact(self):
- self.assertEqual(list(_normparse('a { b')), ['a { b'])
+ self.assertEqual(list(_normparse("a { b")), ["a { b"])
def test_bare_closer_kept_intact(self):
- self.assertEqual(list(_normparse('a } b')), ['a } b'])
+ self.assertEqual(list(_normparse("a } b")), ["a } b"])
def test_bare_sep_kept_intact(self):
- self.assertEqual(list(_normparse('a , b')), ['a , b'])
+ self.assertEqual(list(_normparse("a , b")), ["a , b"])
def test_symbol_alone(self):
- parts = list(_normparse('$foo'))
+ parts = list(_normparse("$foo"))
self.assertEqual(len(parts), 1)
self._assert_symbol(parts[0], "foo")
def test_symbol_in_text(self):
- parts = list(_normparse('hello $foo world'))
+ parts = list(_normparse("hello $foo world"))
self.assertEqual(len(parts), 3)
- self.assertEqual(parts[0], 'hello ')
+ self.assertEqual(parts[0], "hello ")
self._assert_symbol(parts[1], "foo")
- self.assertEqual(parts[2], ' world')
+ self.assertEqual(parts[2], " world")
def test_symbol_with_braces(self):
- parts = list(_normparse('hello${foo}world'))
+ parts = list(_normparse("hello${foo}world"))
self.assertEqual(len(parts), 3)
- self.assertEqual(parts[0], 'hello')
+ self.assertEqual(parts[0], "hello")
self._assert_symbol(parts[1], "foo")
- self.assertEqual(parts[2], 'world')
+ self.assertEqual(parts[2], "world")
def test_unclosed_braces_symbol(self):
- self.assertEqual(list(_normparse('a ${ b')), ['a ${ b'])
+ self.assertEqual(list(_normparse("a ${ b")), ["a ${ b"])
def test_empty_braces_symbol(self):
- self.assertEqual(list(_normparse('a ${} b')), ['a ${} b'])
+ self.assertEqual(list(_normparse("a ${} b")), ["a ${} b"])
def test_call_without_args_at_end(self):
- self.assertEqual(list(_normparse('foo %bar')), ['foo %bar'])
+ self.assertEqual(list(_normparse("foo %bar")), ["foo %bar"])
def test_call_without_args(self):
- self.assertEqual(list(_normparse('foo %bar baz')), ['foo %bar baz'])
+ self.assertEqual(list(_normparse("foo %bar baz")), ["foo %bar baz"])
def test_call_with_unclosed_args(self):
- self.assertEqual(list(_normparse('foo %bar{ baz')),
- ['foo %bar{ baz'])
+ self.assertEqual(list(_normparse("foo %bar{ baz")), ["foo %bar{ baz"])
def test_call_with_unclosed_multiple_args(self):
- self.assertEqual(list(_normparse('foo %bar{bar,bar baz')),
- ['foo %bar{bar,bar baz'])
+ self.assertEqual(
+ list(_normparse("foo %bar{bar,bar baz")), ["foo %bar{bar,bar baz"]
+ )
def test_call_empty_arg(self):
- parts = list(_normparse('%foo{}'))
+ parts = list(_normparse("%foo{}"))
self.assertEqual(len(parts), 1)
self._assert_call(parts[0], "foo", 1)
self.assertEqual(list(_normexpr(parts[0].args[0])), [])
def test_call_single_arg(self):
- parts = list(_normparse('%foo{bar}'))
+ parts = list(_normparse("%foo{bar}"))
self.assertEqual(len(parts), 1)
self._assert_call(parts[0], "foo", 1)
- self.assertEqual(list(_normexpr(parts[0].args[0])), ['bar'])
+ self.assertEqual(list(_normexpr(parts[0].args[0])), ["bar"])
def test_call_two_args(self):
- parts = list(_normparse('%foo{bar,baz}'))
+ parts = list(_normparse("%foo{bar,baz}"))
self.assertEqual(len(parts), 1)
self._assert_call(parts[0], "foo", 2)
- self.assertEqual(list(_normexpr(parts[0].args[0])), ['bar'])
- self.assertEqual(list(_normexpr(parts[0].args[1])), ['baz'])
+ self.assertEqual(list(_normexpr(parts[0].args[0])), ["bar"])
+ self.assertEqual(list(_normexpr(parts[0].args[1])), ["baz"])
def test_call_with_escaped_sep(self):
- parts = list(_normparse('%foo{bar$,baz}'))
+ parts = list(_normparse("%foo{bar$,baz}"))
self.assertEqual(len(parts), 1)
self._assert_call(parts[0], "foo", 1)
- self.assertEqual(list(_normexpr(parts[0].args[0])), ['bar,baz'])
+ self.assertEqual(list(_normexpr(parts[0].args[0])), ["bar,baz"])
def test_call_with_escaped_close(self):
- parts = list(_normparse('%foo{bar$}baz}'))
+ parts = list(_normparse("%foo{bar$}baz}"))
self.assertEqual(len(parts), 1)
self._assert_call(parts[0], "foo", 1)
- self.assertEqual(list(_normexpr(parts[0].args[0])), ['bar}baz'])
+ self.assertEqual(list(_normexpr(parts[0].args[0])), ["bar}baz"])
def test_call_with_symbol_argument(self):
- parts = list(_normparse('%foo{$bar,baz}'))
+ parts = list(_normparse("%foo{$bar,baz}"))
self.assertEqual(len(parts), 1)
self._assert_call(parts[0], "foo", 2)
arg_parts = list(_normexpr(parts[0].args[0]))
@@ -191,7 +200,7 @@ def test_call_with_symbol_argument(self):
self.assertEqual(list(_normexpr(parts[0].args[1])), ["baz"])
def test_call_with_nested_call_argument(self):
- parts = list(_normparse('%foo{%bar{},baz}'))
+ parts = list(_normparse("%foo{%bar{},baz}"))
self.assertEqual(len(parts), 1)
self._assert_call(parts[0], "foo", 2)
arg_parts = list(_normexpr(parts[0].args[0]))
@@ -200,45 +209,45 @@ def test_call_with_nested_call_argument(self):
self.assertEqual(list(_normexpr(parts[0].args[1])), ["baz"])
def test_nested_call_with_argument(self):
- parts = list(_normparse('%foo{%bar{baz}}'))
+ parts = list(_normparse("%foo{%bar{baz}}"))
self.assertEqual(len(parts), 1)
self._assert_call(parts[0], "foo", 1)
arg_parts = list(_normexpr(parts[0].args[0]))
self.assertEqual(len(arg_parts), 1)
self._assert_call(arg_parts[0], "bar", 1)
- self.assertEqual(list(_normexpr(arg_parts[0].args[0])), ['baz'])
+ self.assertEqual(list(_normexpr(arg_parts[0].args[0])), ["baz"])
def test_sep_before_call_two_args(self):
- parts = list(_normparse('hello, %foo{bar,baz}'))
+ parts = list(_normparse("hello, %foo{bar,baz}"))
self.assertEqual(len(parts), 2)
- self.assertEqual(parts[0], 'hello, ')
+ self.assertEqual(parts[0], "hello, ")
self._assert_call(parts[1], "foo", 2)
- self.assertEqual(list(_normexpr(parts[1].args[0])), ['bar'])
- self.assertEqual(list(_normexpr(parts[1].args[1])), ['baz'])
+ self.assertEqual(list(_normexpr(parts[1].args[0])), ["bar"])
+ self.assertEqual(list(_normexpr(parts[1].args[1])), ["baz"])
def test_sep_with_symbols(self):
- parts = list(_normparse('hello,$foo,$bar'))
+ parts = list(_normparse("hello,$foo,$bar"))
self.assertEqual(len(parts), 4)
- self.assertEqual(parts[0], 'hello,')
+ self.assertEqual(parts[0], "hello,")
self._assert_symbol(parts[1], "foo")
- self.assertEqual(parts[2], ',')
+ self.assertEqual(parts[2], ",")
self._assert_symbol(parts[3], "bar")
def test_newline_at_end(self):
- parts = list(_normparse('foo\n'))
+ parts = list(_normparse("foo\n"))
self.assertEqual(len(parts), 1)
- self.assertEqual(parts[0], 'foo\n')
+ self.assertEqual(parts[0], "foo\n")
class EvalTest(unittest.TestCase):
def _eval(self, template):
values = {
- 'foo': 'bar',
- 'baz': 'BaR',
+ "foo": "bar",
+ "baz": "BaR",
}
functions = {
- 'lower': str.lower,
- 'len': len,
+ "lower": str.lower,
+ "len": len,
}
return functemplate.Template(template).substitute(values, functions)
@@ -286,5 +295,6 @@ def test_function_call_with_empty_arg(self):
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/test_ui.py b/test/test_ui.py
index f8ecd5d491..cae8614803 100644
--- a/test/test_ui.py
+++ b/test/test_ui.py
@@ -16,95 +16,91 @@
"""
import os
-import shutil
+import platform
import re
+import shutil
import subprocess
-import platform
import sys
import unittest
-
-from unittest.mock import patch, Mock
from test import _common
-from test.helper import capture_stdout, has_program, TestHelper, control_stdin
+from test.helper import TestHelper, capture_stdout, control_stdin, has_program
+from unittest.mock import Mock, patch
-from beets import library
-from beets import ui
-from beets.ui import commands
-from beets import autotag
-from beets.autotag.match import distance
-from mediafile import MediaFile
-from beets import config
-from beets import plugins
from confuse import ConfigError
-from beets import util
-from beets.util import syspath, MoveOperation
+from mediafile import MediaFile
+
+from beets import autotag, config, library, plugins, ui, util
+from beets.autotag.match import distance
+from beets.ui import commands
+from beets.util import MoveOperation, syspath
class ListTest(unittest.TestCase):
def setUp(self):
- self.lib = library.Library(':memory:')
+ self.lib = library.Library(":memory:")
self.item = _common.item()
- self.item.path = 'xxx/yyy'
+ self.item.path = "xxx/yyy"
self.lib.add(self.item)
self.lib.add_album([self.item])
- def _run_list(self, query='', album=False, path=False, fmt=''):
+ def _run_list(self, query="", album=False, path=False, fmt=""):
with capture_stdout() as stdout:
commands.list_items(self.lib, query, album, fmt)
return stdout
def test_list_outputs_item(self):
stdout = self._run_list()
- self.assertIn('the title', stdout.getvalue())
+ self.assertIn("the title", stdout.getvalue())
def test_list_unicode_query(self):
- self.item.title = 'na\xefve'
+ self.item.title = "na\xefve"
self.item.store()
self.lib._connection().commit()
- stdout = self._run_list(['na\xefve'])
+ stdout = self._run_list(["na\xefve"])
out = stdout.getvalue()
- self.assertTrue('na\xefve' in out)
+ self.assertTrue("na\xefve" in out)
def test_list_item_path(self):
- stdout = self._run_list(fmt='$path')
- self.assertEqual(stdout.getvalue().strip(), 'xxx/yyy')
+ stdout = self._run_list(fmt="$path")
+ self.assertEqual(stdout.getvalue().strip(), "xxx/yyy")
def test_list_album_outputs_something(self):
stdout = self._run_list(album=True)
self.assertGreater(len(stdout.getvalue()), 0)
def test_list_album_path(self):
- stdout = self._run_list(album=True, fmt='$path')
- self.assertEqual(stdout.getvalue().strip(), 'xxx')
+ stdout = self._run_list(album=True, fmt="$path")
+ self.assertEqual(stdout.getvalue().strip(), "xxx")
def test_list_album_omits_title(self):
stdout = self._run_list(album=True)
- self.assertNotIn('the title', stdout.getvalue())
+ self.assertNotIn("the title", stdout.getvalue())
def test_list_uses_track_artist(self):
stdout = self._run_list()
- self.assertIn('the artist', stdout.getvalue())
- self.assertNotIn('the album artist', stdout.getvalue())
+ self.assertIn("the artist", stdout.getvalue())
+ self.assertNotIn("the album artist", stdout.getvalue())
def test_list_album_uses_album_artist(self):
stdout = self._run_list(album=True)
- self.assertNotIn('the artist', stdout.getvalue())
- self.assertIn('the album artist', stdout.getvalue())
+ self.assertNotIn("the artist", stdout.getvalue())
+ self.assertIn("the album artist", stdout.getvalue())
def test_list_item_format_artist(self):
- stdout = self._run_list(fmt='$artist')
- self.assertIn('the artist', stdout.getvalue())
+ stdout = self._run_list(fmt="$artist")
+ self.assertIn("the artist", stdout.getvalue())
def test_list_item_format_multiple(self):
- stdout = self._run_list(fmt='$artist - $album - $year')
- self.assertEqual('the artist - the album - 0001',
- stdout.getvalue().strip())
+ stdout = self._run_list(fmt="$artist - $album - $year")
+ self.assertEqual(
+ "the artist - the album - 0001", stdout.getvalue().strip()
+ )
def test_list_album_format(self):
- stdout = self._run_list(album=True, fmt='$genre')
- self.assertIn('the genre', stdout.getvalue())
- self.assertNotIn('the album', stdout.getvalue())
+ stdout = self._run_list(album=True, fmt="$genre")
+ self.assertIn("the genre", stdout.getvalue())
+ self.assertNotIn("the album", stdout.getvalue())
class RemoveTest(_common.TestCase, TestHelper):
@@ -113,38 +109,38 @@ def setUp(self):
self.io.install()
- self.libdir = os.path.join(self.temp_dir, b'testlibdir')
+ self.libdir = os.path.join(self.temp_dir, b"testlibdir")
os.mkdir(syspath(self.libdir))
# Copy a file into the library.
- self.lib = library.Library(':memory:', self.libdir)
- self.item_path = os.path.join(_common.RSRC, b'full.mp3')
+ self.lib = library.Library(":memory:", self.libdir)
+ self.item_path = os.path.join(_common.RSRC, b"full.mp3")
self.i = library.Item.from_path(self.item_path)
self.lib.add(self.i)
self.i.move(operation=MoveOperation.COPY)
def test_remove_items_no_delete(self):
- self.io.addinput('y')
- commands.remove_items(self.lib, '', False, False, False)
+ self.io.addinput("y")
+ commands.remove_items(self.lib, "", False, False, False)
items = self.lib.items()
self.assertEqual(len(list(items)), 0)
self.assertExists(self.i.path)
def test_remove_items_with_delete(self):
- self.io.addinput('y')
- commands.remove_items(self.lib, '', False, True, False)
+ self.io.addinput("y")
+ commands.remove_items(self.lib, "", False, True, False)
items = self.lib.items()
self.assertEqual(len(list(items)), 0)
self.assertNotExists(self.i.path)
def test_remove_items_with_force_no_delete(self):
- commands.remove_items(self.lib, '', False, False, True)
+ commands.remove_items(self.lib, "", False, False, True)
items = self.lib.items()
self.assertEqual(len(list(items)), 0)
self.assertExists(self.i.path)
def test_remove_items_with_force_delete(self):
- commands.remove_items(self.lib, '', False, True, True)
+ commands.remove_items(self.lib, "", False, True, True)
items = self.lib.items()
self.assertEqual(len(list(items)), 0)
self.assertNotExists(self.i.path)
@@ -154,9 +150,9 @@ def test_remove_items_select_with_delete(self):
self.lib.add(i2)
i2.move(operation=MoveOperation.COPY)
- for s in ('s', 'y', 'n'):
+ for s in ("s", "y", "n"):
self.io.addinput(s)
- commands.remove_items(self.lib, '', False, True, False)
+ commands.remove_items(self.lib, "", False, True, False)
items = self.lib.items()
self.assertEqual(len(list(items)), 1)
# There is probably no guarantee that the items are queried in any
@@ -176,9 +172,9 @@ def test_remove_albums_select_with_delete(self):
items = self.lib.items()
self.assertEqual(len(list(items)), 3)
- for s in ('s', 'y', 'n'):
+ for s in ("s", "y", "n"):
self.io.addinput(s)
- commands.remove_items(self.lib, '', True, True, False)
+ commands.remove_items(self.lib, "", True, True, False)
items = self.lib.items()
self.assertEqual(len(list(items)), 2) # incl. the item from setUp()
# See test_remove_items_select_with_delete()
@@ -189,7 +185,6 @@ def test_remove_albums_select_with_delete(self):
class ModifyTest(unittest.TestCase, TestHelper):
-
def setUp(self):
self.setup_beets()
self.album = self.add_album_fixture()
@@ -200,29 +195,29 @@ def tearDown(self):
def modify_inp(self, inp, *args):
with control_stdin(inp):
- self.run_command('modify', *args)
+ self.run_command("modify", *args)
def modify(self, *args):
- self.modify_inp('y', *args)
+ self.modify_inp("y", *args)
# Item tests
def test_modify_item(self):
self.modify("title=newTitle")
item = self.lib.items().get()
- self.assertEqual(item.title, 'newTitle')
+ self.assertEqual(item.title, "newTitle")
def test_modify_item_abort(self):
item = self.lib.items().get()
title = item.title
- self.modify_inp('n', "title=newTitle")
+ self.modify_inp("n", "title=newTitle")
item = self.lib.items().get()
self.assertEqual(item.title, title)
def test_modify_item_no_change(self):
title = "Tracktitle"
item = self.add_item_fixture(title=title)
- self.modify_inp('y', "title", f"title={title}")
+ self.modify_inp("y", "title", f"title={title}")
item = self.lib.items(title).get()
self.assertEqual(item.title, title)
@@ -230,30 +225,30 @@ def test_modify_write_tags(self):
self.modify("title=newTitle")
item = self.lib.items().get()
item.read()
- self.assertEqual(item.title, 'newTitle')
+ self.assertEqual(item.title, "newTitle")
def test_modify_dont_write_tags(self):
self.modify("--nowrite", "title=newTitle")
item = self.lib.items().get()
item.read()
- self.assertNotEqual(item.title, 'newTitle')
+ self.assertNotEqual(item.title, "newTitle")
def test_move(self):
self.modify("title=newTitle")
item = self.lib.items().get()
- self.assertIn(b'newTitle', item.path)
+ self.assertIn(b"newTitle", item.path)
def test_not_move(self):
self.modify("--nomove", "title=newTitle")
item = self.lib.items().get()
- self.assertNotIn(b'newTitle', item.path)
+ self.assertNotIn(b"newTitle", item.path)
def test_no_write_no_move(self):
self.modify("--nomove", "--nowrite", "title=newTitle")
item = self.lib.items().get()
item.read()
- self.assertNotIn(b'newTitle', item.path)
- self.assertNotEqual(item.title, 'newTitle')
+ self.assertNotIn(b"newTitle", item.path)
+ self.assertNotEqual(item.title, "newTitle")
def test_update_mtime(self):
item = self.item
@@ -277,11 +272,12 @@ def test_selective_modify(self):
original_artist = "composer"
new_artist = "coverArtist"
for i in range(0, 10):
- self.add_item_fixture(title=f"{title}{i}",
- artist=original_artist,
- album=album)
- self.modify_inp('s\ny\ny\ny\nn\nn\ny\ny\ny\ny\nn',
- title, f"artist={new_artist}")
+ self.add_item_fixture(
+ title=f"{title}{i}", artist=original_artist, album=album
+ )
+ self.modify_inp(
+ "s\ny\ny\ny\nn\nn\ny\ny\ny\ny\nn", title, f"artist={new_artist}"
+ )
original_items = self.lib.items(f"artist:{original_artist}")
new_items = self.lib.items(f"artist:{new_artist}")
self.assertEqual(len(list(original_items)), 3)
@@ -289,9 +285,9 @@ def test_selective_modify(self):
def test_modify_formatted(self):
for i in range(0, 3):
- self.add_item_fixture(title=f"title{i}",
- artist="artist",
- album="album")
+ self.add_item_fixture(
+ title=f"title{i}", artist="artist", album="album"
+ )
items = list(self.lib.items())
self.modify("title=${title} - append")
for item in items:
@@ -304,31 +300,31 @@ def test_modify_formatted(self):
def test_modify_album(self):
self.modify("--album", "album=newAlbum")
album = self.lib.albums().get()
- self.assertEqual(album.album, 'newAlbum')
+ self.assertEqual(album.album, "newAlbum")
def test_modify_album_write_tags(self):
self.modify("--album", "album=newAlbum")
item = self.lib.items().get()
item.read()
- self.assertEqual(item.album, 'newAlbum')
+ self.assertEqual(item.album, "newAlbum")
def test_modify_album_dont_write_tags(self):
self.modify("--album", "--nowrite", "album=newAlbum")
item = self.lib.items().get()
item.read()
- self.assertEqual(item.album, 'the album')
+ self.assertEqual(item.album, "the album")
def test_album_move(self):
self.modify("--album", "album=newAlbum")
item = self.lib.items().get()
item.read()
- self.assertIn(b'newAlbum', item.path)
+ self.assertIn(b"newAlbum", item.path)
def test_album_not_move(self):
self.modify("--nomove", "--album", "album=newAlbum")
item = self.lib.items().get()
item.read()
- self.assertNotIn(b'newAlbum', item.path)
+ self.assertNotIn(b"newAlbum", item.path)
def test_modify_album_formatted(self):
item = self.lib.items().get()
@@ -343,63 +339,66 @@ def test_write_initial_key_tag(self):
self.modify("initial_key=C#m")
item = self.lib.items().get()
mediafile = MediaFile(syspath(item.path))
- self.assertEqual(mediafile.initial_key, 'C#m')
+ self.assertEqual(mediafile.initial_key, "C#m")
def test_set_flexattr(self):
self.modify("flexattr=testAttr")
item = self.lib.items().get()
- self.assertEqual(item.flexattr, 'testAttr')
+ self.assertEqual(item.flexattr, "testAttr")
def test_remove_flexattr(self):
item = self.lib.items().get()
- item.flexattr = 'testAttr'
+ item.flexattr = "testAttr"
item.store()
self.modify("flexattr!")
item = self.lib.items().get()
self.assertNotIn("flexattr", item)
- @unittest.skip('not yet implemented')
+ @unittest.skip("not yet implemented")
def test_delete_initial_key_tag(self):
item = self.lib.items().get()
- item.initial_key = 'C#m'
+ item.initial_key = "C#m"
item.write()
item.store()
mediafile = MediaFile(syspath(item.path))
- self.assertEqual(mediafile.initial_key, 'C#m')
+ self.assertEqual(mediafile.initial_key, "C#m")
self.modify("initial_key!")
mediafile = MediaFile(syspath(item.path))
self.assertIsNone(mediafile.initial_key)
def test_arg_parsing_colon_query(self):
- (query, mods, dels) = commands.modify_parse_args(["title:oldTitle",
- "title=newTitle"])
+ (query, mods, dels) = commands.modify_parse_args(
+ ["title:oldTitle", "title=newTitle"]
+ )
self.assertEqual(query, ["title:oldTitle"])
self.assertEqual(mods, {"title": "newTitle"})
def test_arg_parsing_delete(self):
- (query, mods, dels) = commands.modify_parse_args(["title:oldTitle",
- "title!"])
+ (query, mods, dels) = commands.modify_parse_args(
+ ["title:oldTitle", "title!"]
+ )
self.assertEqual(query, ["title:oldTitle"])
self.assertEqual(dels, ["title"])
def test_arg_parsing_query_with_exclaimation(self):
- (query, mods, dels) = commands.modify_parse_args(["title:oldTitle!",
- "title=newTitle!"])
+ (query, mods, dels) = commands.modify_parse_args(
+ ["title:oldTitle!", "title=newTitle!"]
+ )
self.assertEqual(query, ["title:oldTitle!"])
self.assertEqual(mods, {"title": "newTitle!"})
def test_arg_parsing_equals_in_value(self):
- (query, mods, dels) = commands.modify_parse_args(["title:foo=bar",
- "title=newTitle"])
+ (query, mods, dels) = commands.modify_parse_args(
+ ["title:foo=bar", "title=newTitle"]
+ )
self.assertEqual(query, ["title:foo=bar"])
self.assertEqual(mods, {"title": "newTitle"})
class WriteTest(unittest.TestCase, TestHelper):
-
def setUp(self):
self.setup_beets()
@@ -407,11 +406,11 @@ def tearDown(self):
self.teardown_beets()
def write_cmd(self, *args):
- return self.run_with_output('write', *args)
+ return self.run_with_output("write", *args)
def test_update_mtime(self):
item = self.add_item_fixture()
- item['title'] = 'a new title'
+ item["title"] = "a new title"
item.store()
item = self.lib.items().get()
@@ -435,20 +434,19 @@ def test_non_metadata_field_unchanged(self):
output = self.write_cmd()
- self.assertEqual(output, '')
+ self.assertEqual(output, "")
def test_write_metadata_field(self):
item = self.add_item_fixture()
item.read()
old_title = item.title
- item.title = 'new title'
+ item.title = "new title"
item.store()
output = self.write_cmd()
- self.assertTrue(f'{old_title} -> new title'
- in output)
+ self.assertTrue(f"{old_title} -> new title" in output)
class MoveTest(_common.TestCase):
@@ -457,80 +455,88 @@ def setUp(self):
self.io.install()
- self.libdir = os.path.join(self.temp_dir, b'testlibdir')
+ self.libdir = os.path.join(self.temp_dir, b"testlibdir")
os.mkdir(syspath(self.libdir))
- self.itempath = os.path.join(self.libdir, b'srcfile')
+ self.itempath = os.path.join(self.libdir, b"srcfile")
shutil.copy(
- syspath(os.path.join(_common.RSRC, b'full.mp3')),
+ syspath(os.path.join(_common.RSRC, b"full.mp3")),
syspath(self.itempath),
)
# Add a file to the library but don't copy it in yet.
- self.lib = library.Library(':memory:', self.libdir)
+ self.lib = library.Library(":memory:", self.libdir)
self.i = library.Item.from_path(self.itempath)
self.lib.add(self.i)
self.album = self.lib.add_album([self.i])
# Alternate destination directory.
- self.otherdir = os.path.join(self.temp_dir, b'testotherdir')
-
- def _move(self, query=(), dest=None, copy=False, album=False,
- pretend=False, export=False):
- commands.move_items(self.lib, dest, query, copy, album, pretend,
- export=export)
+ self.otherdir = os.path.join(self.temp_dir, b"testotherdir")
+
+ def _move(
+ self,
+ query=(),
+ dest=None,
+ copy=False,
+ album=False,
+ pretend=False,
+ export=False,
+ ):
+ commands.move_items(
+ self.lib, dest, query, copy, album, pretend, export=export
+ )
def test_move_item(self):
self._move()
self.i.load()
- self.assertTrue(b'testlibdir' in self.i.path)
+ self.assertTrue(b"testlibdir" in self.i.path)
self.assertExists(self.i.path)
self.assertNotExists(self.itempath)
def test_copy_item(self):
self._move(copy=True)
self.i.load()
- self.assertTrue(b'testlibdir' in self.i.path)
+ self.assertTrue(b"testlibdir" in self.i.path)
self.assertExists(self.i.path)
self.assertExists(self.itempath)
def test_move_album(self):
self._move(album=True)
self.i.load()
- self.assertTrue(b'testlibdir' in self.i.path)
+ self.assertTrue(b"testlibdir" in self.i.path)
self.assertExists(self.i.path)
self.assertNotExists(self.itempath)
def test_copy_album(self):
self._move(copy=True, album=True)
self.i.load()
- self.assertTrue(b'testlibdir' in self.i.path)
+ self.assertTrue(b"testlibdir" in self.i.path)
self.assertExists(self.i.path)
self.assertExists(self.itempath)
def test_move_item_custom_dir(self):
self._move(dest=self.otherdir)
self.i.load()
- self.assertTrue(b'testotherdir' in self.i.path)
+ self.assertTrue(b"testotherdir" in self.i.path)
self.assertExists(self.i.path)
self.assertNotExists(self.itempath)
def test_move_album_custom_dir(self):
self._move(dest=self.otherdir, album=True)
self.i.load()
- self.assertTrue(b'testotherdir' in self.i.path)
+ self.assertTrue(b"testotherdir" in self.i.path)
self.assertExists(self.i.path)
self.assertNotExists(self.itempath)
def test_pretend_move_item(self):
self._move(dest=self.otherdir, pretend=True)
self.i.load()
- self.assertIn(b'srcfile', self.i.path)
+ self.assertIn(b"srcfile", self.i.path)
def test_pretend_move_album(self):
self._move(album=True, pretend=True)
self.i.load()
- self.assertIn(b'srcfile', self.i.path)
+ self.assertIn(b"srcfile", self.i.path)
def test_export_item_custom_dir(self):
self._move(dest=self.otherdir, export=True)
@@ -547,7 +553,7 @@ def test_export_album_custom_dir(self):
def test_pretend_export_item(self):
self._move(dest=self.otherdir, pretend=True, export=True)
self.i.load()
- self.assertIn(b'srcfile', self.i.path)
+ self.assertIn(b"srcfile", self.i.path)
self.assertNotExists(self.otherdir)
@@ -557,12 +563,12 @@ def setUp(self):
self.io.install()
- self.libdir = os.path.join(self.temp_dir, b'testlibdir')
+ self.libdir = os.path.join(self.temp_dir, b"testlibdir")
# Copy a file into the library.
- self.lib = library.Library(':memory:', self.libdir)
- item_path = os.path.join(_common.RSRC, b'full.mp3')
- item_path_two = os.path.join(_common.RSRC, b'full.flac')
+ self.lib = library.Library(":memory:", self.libdir)
+ item_path = os.path.join(_common.RSRC, b"full.mp3")
+ item_path_two = os.path.join(_common.RSRC, b"full.flac")
self.i = library.Item.from_path(item_path)
self.i2 = library.Item.from_path(item_path_two)
self.lib.add(self.i)
@@ -572,20 +578,34 @@ def setUp(self):
self.album = self.lib.add_album([self.i, self.i2])
# Album art.
- artfile = os.path.join(self.temp_dir, b'testart.jpg')
+ artfile = os.path.join(self.temp_dir, b"testart.jpg")
_common.touch(artfile)
self.album.set_art(artfile)
self.album.store()
util.remove(artfile)
- def _update(self, query=(), album=False, move=False, reset_mtime=True,
- fields=None, exclude_fields=None):
- self.io.addinput('y')
+ def _update(
+ self,
+ query=(),
+ album=False,
+ move=False,
+ reset_mtime=True,
+ fields=None,
+ exclude_fields=None,
+ ):
+ self.io.addinput("y")
if reset_mtime:
self.i.mtime = 0
self.i.store()
- commands.update_items(self.lib, query, album, move, False,
- fields=fields, exclude_fields=exclude_fields)
+ commands.update_items(
+ self.lib,
+ query,
+ album,
+ move,
+ False,
+ fields=fields,
+ exclude_fields=exclude_fields,
+ )
def test_delete_removes_item(self):
self.assertTrue(list(self.lib.items()))
@@ -611,60 +631,60 @@ def test_delete_removes_album_art(self):
def test_modified_metadata_detected(self):
mf = MediaFile(syspath(self.i.path))
- mf.title = 'differentTitle'
+ mf.title = "differentTitle"
mf.save()
self._update()
item = self.lib.items().get()
- self.assertEqual(item.title, 'differentTitle')
+ self.assertEqual(item.title, "differentTitle")
def test_modified_metadata_moved(self):
mf = MediaFile(syspath(self.i.path))
- mf.title = 'differentTitle'
+ mf.title = "differentTitle"
mf.save()
self._update(move=True)
item = self.lib.items().get()
- self.assertTrue(b'differentTitle' in item.path)
+ self.assertTrue(b"differentTitle" in item.path)
def test_modified_metadata_not_moved(self):
mf = MediaFile(syspath(self.i.path))
- mf.title = 'differentTitle'
+ mf.title = "differentTitle"
mf.save()
self._update(move=False)
item = self.lib.items().get()
- self.assertTrue(b'differentTitle' not in item.path)
+ self.assertTrue(b"differentTitle" not in item.path)
def test_selective_modified_metadata_moved(self):
mf = MediaFile(syspath(self.i.path))
- mf.title = 'differentTitle'
- mf.genre = 'differentGenre'
+ mf.title = "differentTitle"
+ mf.genre = "differentGenre"
mf.save()
- self._update(move=True, fields=['title'])
+ self._update(move=True, fields=["title"])
item = self.lib.items().get()
- self.assertTrue(b'differentTitle' in item.path)
- self.assertNotEqual(item.genre, 'differentGenre')
+ self.assertTrue(b"differentTitle" in item.path)
+ self.assertNotEqual(item.genre, "differentGenre")
def test_selective_modified_metadata_not_moved(self):
mf = MediaFile(syspath(self.i.path))
- mf.title = 'differentTitle'
- mf.genre = 'differentGenre'
+ mf.title = "differentTitle"
+ mf.genre = "differentGenre"
mf.save()
- self._update(move=False, fields=['title'])
+ self._update(move=False, fields=["title"])
item = self.lib.items().get()
- self.assertTrue(b'differentTitle' not in item.path)
- self.assertNotEqual(item.genre, 'differentGenre')
+ self.assertTrue(b"differentTitle" not in item.path)
+ self.assertNotEqual(item.genre, "differentGenre")
def test_modified_album_metadata_moved(self):
mf = MediaFile(syspath(self.i.path))
- mf.album = 'differentAlbum'
+ mf.album = "differentAlbum"
mf.save()
self._update(move=True)
item = self.lib.items().get()
- self.assertTrue(b'differentAlbum' in item.path)
+ self.assertTrue(b"differentAlbum" in item.path)
def test_modified_album_metadata_art_moved(self):
artpath = self.album.artpath
mf = MediaFile(syspath(self.i.path))
- mf.album = 'differentAlbum'
+ mf.album = "differentAlbum"
mf.save()
self._update(move=True)
album = self.lib.albums()[0]
@@ -673,27 +693,27 @@ def test_modified_album_metadata_art_moved(self):
def test_selective_modified_album_metadata_moved(self):
mf = MediaFile(syspath(self.i.path))
- mf.album = 'differentAlbum'
- mf.genre = 'differentGenre'
+ mf.album = "differentAlbum"
+ mf.genre = "differentGenre"
mf.save()
- self._update(move=True, fields=['album'])
+ self._update(move=True, fields=["album"])
item = self.lib.items().get()
- self.assertTrue(b'differentAlbum' in item.path)
- self.assertNotEqual(item.genre, 'differentGenre')
+ self.assertTrue(b"differentAlbum" in item.path)
+ self.assertNotEqual(item.genre, "differentGenre")
def test_selective_modified_album_metadata_not_moved(self):
mf = MediaFile(syspath(self.i.path))
- mf.album = 'differentAlbum'
- mf.genre = 'differentGenre'
+ mf.album = "differentAlbum"
+ mf.genre = "differentGenre"
mf.save()
- self._update(move=True, fields=['genre'])
+ self._update(move=True, fields=["genre"])
item = self.lib.items().get()
- self.assertTrue(b'differentAlbum' not in item.path)
- self.assertEqual(item.genre, 'differentGenre')
+ self.assertTrue(b"differentAlbum" not in item.path)
+ self.assertEqual(item.genre, "differentGenre")
def test_mtime_match_skips_update(self):
mf = MediaFile(syspath(self.i.path))
- mf.title = 'differentTitle'
+ mf.title = "differentTitle"
mf.save()
# Make in-memory mtime match on-disk mtime.
@@ -702,7 +722,7 @@ def test_mtime_match_skips_update(self):
self._update(reset_mtime=False)
item = self.lib.items().get()
- self.assertEqual(item.title, 'full')
+ self.assertEqual(item.title, "full")
def test_multivalued_albumtype_roundtrip(self):
# https://github.com/beetbox/beets/issues/4528
@@ -720,22 +740,22 @@ def test_multivalued_albumtype_roundtrip(self):
album.try_sync(write=True, move=False)
album.load()
- self.assertEqual(album.albumtype, correct_albumtype)
+ self.assertEqual(album.albumtype, correct_albumtype)
self.assertEqual(album.albumtypes, correct_albumtypes)
self._update()
album.load()
- self.assertEqual(album.albumtype, correct_albumtype)
+ self.assertEqual(album.albumtype, correct_albumtype)
self.assertEqual(album.albumtypes, correct_albumtypes)
def test_modified_metadata_excluded(self):
mf = MediaFile(syspath(self.i.path))
- mf.lyrics = 'new lyrics'
+ mf.lyrics = "new lyrics"
mf.save()
- self._update(exclude_fields=['lyrics'])
+ self._update(exclude_fields=["lyrics"])
item = self.lib.items().get()
- self.assertNotEqual(item.lyrics, 'new lyrics')
+ self.assertNotEqual(item.lyrics, "new lyrics")
class PrintTest(_common.TestCase):
@@ -744,48 +764,47 @@ def setUp(self):
self.io.install()
def test_print_without_locale(self):
- lang = os.environ.get('LANG')
+ lang = os.environ.get("LANG")
if lang:
- del os.environ['LANG']
+ del os.environ["LANG"]
try:
- ui.print_('something')
+ ui.print_("something")
except TypeError:
- self.fail('TypeError during print')
+ self.fail("TypeError during print")
finally:
if lang:
- os.environ['LANG'] = lang
+ os.environ["LANG"] = lang
def test_print_with_invalid_locale(self):
- old_lang = os.environ.get('LANG')
- os.environ['LANG'] = ''
- old_ctype = os.environ.get('LC_CTYPE')
- os.environ['LC_CTYPE'] = 'UTF-8'
+ old_lang = os.environ.get("LANG")
+ os.environ["LANG"] = ""
+ old_ctype = os.environ.get("LC_CTYPE")
+ os.environ["LC_CTYPE"] = "UTF-8"
try:
- ui.print_('something')
+ ui.print_("something")
except ValueError:
- self.fail('ValueError during print')
+ self.fail("ValueError during print")
finally:
if old_lang:
- os.environ['LANG'] = old_lang
+ os.environ["LANG"] = old_lang
else:
- del os.environ['LANG']
+ del os.environ["LANG"]
if old_ctype:
- os.environ['LC_CTYPE'] = old_ctype
+ os.environ["LC_CTYPE"] = old_ctype
else:
- del os.environ['LC_CTYPE']
+ del os.environ["LC_CTYPE"]
class ImportTest(_common.TestCase):
def test_quiet_timid_disallowed(self):
- config['import']['quiet'] = True
- config['import']['timid'] = True
- self.assertRaises(ui.UserError, commands.import_files, None, [],
- None)
+ config["import"]["quiet"] = True
+ config["import"]["timid"] = True
+ self.assertRaises(ui.UserError, commands.import_files, None, [], None)
def test_parse_paths_from_logfile(self):
- if os.path.__name__ == 'ntpath':
+ if os.path.__name__ == "ntpath":
logfile_content = (
"import started Wed Jun 15 23:08:26 2022\n"
"asis C:\\music\\Beatles, The\\The Beatles; C:\\music\\Beatles, The\\The Beatles\\CD 01; C:\\music\\Beatles, The\\The Beatles\\CD 02\n" # noqa: E501
@@ -826,34 +845,36 @@ def setUp(self):
# Don't use the BEETSDIR from `helper`. Instead, we point the home
# directory there. Some tests will set `BEETSDIR` themselves.
- del os.environ['BEETSDIR']
- self._old_home = os.environ.get('HOME')
- os.environ['HOME'] = util.py3_path(self.temp_dir)
+ del os.environ["BEETSDIR"]
+ self._old_home = os.environ.get("HOME")
+ os.environ["HOME"] = util.py3_path(self.temp_dir)
# Also set APPDATA, the Windows equivalent of setting $HOME.
- self._old_appdata = os.environ.get('APPDATA')
- os.environ['APPDATA'] = \
- util.py3_path(os.path.join(self.temp_dir, b'AppData', b'Roaming'))
+ self._old_appdata = os.environ.get("APPDATA")
+ os.environ["APPDATA"] = util.py3_path(
+ os.path.join(self.temp_dir, b"AppData", b"Roaming")
+ )
self._orig_cwd = os.getcwd()
self.test_cmd = self._make_test_cmd()
commands.default_commands.append(self.test_cmd)
# Default user configuration
- if platform.system() == 'Windows':
+ if platform.system() == "Windows":
self.user_config_dir = os.path.join(
- self.temp_dir, b'AppData', b'Roaming', b'beets'
+ self.temp_dir, b"AppData", b"Roaming", b"beets"
)
else:
self.user_config_dir = os.path.join(
- self.temp_dir, b'.config', b'beets'
+ self.temp_dir, b".config", b"beets"
)
os.makedirs(syspath(self.user_config_dir))
- self.user_config_path = os.path.join(self.user_config_dir,
- b'config.yaml')
+ self.user_config_path = os.path.join(
+ self.user_config_dir, b"config.yaml"
+ )
# Custom BEETSDIR
- self.beetsdir = os.path.join(self.temp_dir, b'beetsdir')
+ self.beetsdir = os.path.join(self.temp_dir, b"beetsdir")
os.makedirs(syspath(self.beetsdir))
self._reset_config()
@@ -863,16 +884,16 @@ def tearDown(self):
commands.default_commands.pop()
os.chdir(syspath(self._orig_cwd))
if self._old_home is not None:
- os.environ['HOME'] = self._old_home
+ os.environ["HOME"] = self._old_home
if self._old_appdata is None:
- del os.environ['APPDATA']
+ del os.environ["APPDATA"]
else:
- os.environ['APPDATA'] = self._old_appdata
+ os.environ["APPDATA"] = self._old_appdata
self.unload_plugins()
self.teardown_beets()
def _make_test_cmd(self):
- test_cmd = ui.Subcommand('test', help='test')
+ test_cmd = ui.Subcommand("test", help="test")
def run(lib, options, args):
test_cmd.lib = lib
@@ -888,231 +909,234 @@ def _reset_config(self):
config._materialized = False
def write_config_file(self):
- return open(self.user_config_path, 'w')
+ return open(self.user_config_path, "w")
def test_paths_section_respected(self):
with self.write_config_file() as config:
- config.write('paths: {x: y}')
+ config.write("paths: {x: y}")
- self.run_command('test', lib=None)
+ self.run_command("test", lib=None)
key, template = self.test_cmd.lib.path_formats[0]
- self.assertEqual(key, 'x')
- self.assertEqual(template.original, 'y')
+ self.assertEqual(key, "x")
+ self.assertEqual(template.original, "y")
def test_default_paths_preserved(self):
default_formats = ui.get_path_formats()
self._reset_config()
with self.write_config_file() as config:
- config.write('paths: {x: y}')
- self.run_command('test', lib=None)
+ config.write("paths: {x: y}")
+ self.run_command("test", lib=None)
key, template = self.test_cmd.lib.path_formats[0]
- self.assertEqual(key, 'x')
- self.assertEqual(template.original, 'y')
- self.assertEqual(self.test_cmd.lib.path_formats[1:],
- default_formats)
+ self.assertEqual(key, "x")
+ self.assertEqual(template.original, "y")
+ self.assertEqual(self.test_cmd.lib.path_formats[1:], default_formats)
def test_nonexistant_db(self):
with self.write_config_file() as config:
- config.write('library: /xxx/yyy/not/a/real/path')
+ config.write("library: /xxx/yyy/not/a/real/path")
with self.assertRaises(ui.UserError):
- self.run_command('test', lib=None)
+ self.run_command("test", lib=None)
def test_user_config_file(self):
with self.write_config_file() as file:
- file.write('anoption: value')
+ file.write("anoption: value")
- self.run_command('test', lib=None)
- self.assertEqual(config['anoption'].get(), 'value')
+ self.run_command("test", lib=None)
+ self.assertEqual(config["anoption"].get(), "value")
def test_replacements_parsed(self):
with self.write_config_file() as config:
config.write("replace: {'[xy]': z}")
- self.run_command('test', lib=None)
+ self.run_command("test", lib=None)
replacements = self.test_cmd.lib.replacements
repls = [(p.pattern, s) for p, s in replacements] # Compare patterns.
- self.assertEqual(repls, [('[xy]', 'z')])
+ self.assertEqual(repls, [("[xy]", "z")])
def test_multiple_replacements_parsed(self):
with self.write_config_file() as config:
config.write("replace: {'[xy]': z, foo: bar}")
- self.run_command('test', lib=None)
+ self.run_command("test", lib=None)
replacements = self.test_cmd.lib.replacements
repls = [(p.pattern, s) for p, s in replacements]
- self.assertEqual(repls, [
- ('[xy]', 'z'),
- ('foo', 'bar'),
- ])
+ self.assertEqual(
+ repls,
+ [
+ ("[xy]", "z"),
+ ("foo", "bar"),
+ ],
+ )
def test_cli_config_option(self):
- config_path = os.path.join(self.temp_dir, b'config.yaml')
- with open(config_path, 'w') as file:
- file.write('anoption: value')
- self.run_command('--config', config_path, 'test', lib=None)
- self.assertEqual(config['anoption'].get(), 'value')
+ config_path = os.path.join(self.temp_dir, b"config.yaml")
+ with open(config_path, "w") as file:
+ file.write("anoption: value")
+ self.run_command("--config", config_path, "test", lib=None)
+ self.assertEqual(config["anoption"].get(), "value")
def test_cli_config_file_overwrites_user_defaults(self):
- with open(self.user_config_path, 'w') as file:
- file.write('anoption: value')
+ with open(self.user_config_path, "w") as file:
+ file.write("anoption: value")
- cli_config_path = os.path.join(self.temp_dir, b'config.yaml')
- with open(cli_config_path, 'w') as file:
- file.write('anoption: cli overwrite')
- self.run_command('--config', cli_config_path, 'test', lib=None)
- self.assertEqual(config['anoption'].get(), 'cli overwrite')
+ cli_config_path = os.path.join(self.temp_dir, b"config.yaml")
+ with open(cli_config_path, "w") as file:
+ file.write("anoption: cli overwrite")
+ self.run_command("--config", cli_config_path, "test", lib=None)
+ self.assertEqual(config["anoption"].get(), "cli overwrite")
def test_cli_config_file_overwrites_beetsdir_defaults(self):
- os.environ['BEETSDIR'] = util.py3_path(self.beetsdir)
- env_config_path = os.path.join(self.beetsdir, b'config.yaml')
- with open(env_config_path, 'w') as file:
- file.write('anoption: value')
-
- cli_config_path = os.path.join(self.temp_dir, b'config.yaml')
- with open(cli_config_path, 'w') as file:
- file.write('anoption: cli overwrite')
- self.run_command('--config', cli_config_path, 'test', lib=None)
- self.assertEqual(config['anoption'].get(), 'cli overwrite')
-
-# @unittest.skip('Difficult to implement with optparse')
-# def test_multiple_cli_config_files(self):
-# cli_config_path_1 = os.path.join(self.temp_dir, b'config.yaml')
-# cli_config_path_2 = os.path.join(self.temp_dir, b'config_2.yaml')
-#
-# with open(cli_config_path_1, 'w') as file:
-# file.write('first: value')
-#
-# with open(cli_config_path_2, 'w') as file:
-# file.write('second: value')
-#
-# self.run_command('--config', cli_config_path_1,
-# '--config', cli_config_path_2, 'test', lib=None)
-# self.assertEqual(config['first'].get(), 'value')
-# self.assertEqual(config['second'].get(), 'value')
-#
-# @unittest.skip('Difficult to implement with optparse')
-# def test_multiple_cli_config_overwrite(self):
-# cli_config_path = os.path.join(self.temp_dir, b'config.yaml')
-# cli_overwrite_config_path = os.path.join(self.temp_dir,
-# b'overwrite_config.yaml')
-#
-# with open(cli_config_path, 'w') as file:
-# file.write('anoption: value')
-#
-# with open(cli_overwrite_config_path, 'w') as file:
-# file.write('anoption: overwrite')
-#
-# self.run_command('--config', cli_config_path,
-# '--config', cli_overwrite_config_path, 'test')
-# self.assertEqual(config['anoption'].get(), 'cli overwrite')
+ os.environ["BEETSDIR"] = util.py3_path(self.beetsdir)
+ env_config_path = os.path.join(self.beetsdir, b"config.yaml")
+ with open(env_config_path, "w") as file:
+ file.write("anoption: value")
+
+ cli_config_path = os.path.join(self.temp_dir, b"config.yaml")
+ with open(cli_config_path, "w") as file:
+ file.write("anoption: cli overwrite")
+ self.run_command("--config", cli_config_path, "test", lib=None)
+ self.assertEqual(config["anoption"].get(), "cli overwrite")
+
+ # @unittest.skip('Difficult to implement with optparse')
+ # def test_multiple_cli_config_files(self):
+ # cli_config_path_1 = os.path.join(self.temp_dir, b'config.yaml')
+ # cli_config_path_2 = os.path.join(self.temp_dir, b'config_2.yaml')
+ #
+ # with open(cli_config_path_1, 'w') as file:
+ # file.write('first: value')
+ #
+ # with open(cli_config_path_2, 'w') as file:
+ # file.write('second: value')
+ #
+ # self.run_command('--config', cli_config_path_1,
+ # '--config', cli_config_path_2, 'test', lib=None)
+ # self.assertEqual(config['first'].get(), 'value')
+ # self.assertEqual(config['second'].get(), 'value')
+ #
+ # @unittest.skip('Difficult to implement with optparse')
+ # def test_multiple_cli_config_overwrite(self):
+ # cli_config_path = os.path.join(self.temp_dir, b'config.yaml')
+ # cli_overwrite_config_path = os.path.join(self.temp_dir,
+ # b'overwrite_config.yaml')
+ #
+ # with open(cli_config_path, 'w') as file:
+ # file.write('anoption: value')
+ #
+ # with open(cli_overwrite_config_path, 'w') as file:
+ # file.write('anoption: overwrite')
+ #
+ # self.run_command('--config', cli_config_path,
+ # '--config', cli_overwrite_config_path, 'test')
+ # self.assertEqual(config['anoption'].get(), 'cli overwrite')
# FIXME: fails on windows
- @unittest.skipIf(sys.platform == 'win32', 'win32')
+ @unittest.skipIf(sys.platform == "win32", "win32")
def test_cli_config_paths_resolve_relative_to_user_dir(self):
- cli_config_path = os.path.join(self.temp_dir, b'config.yaml')
- with open(cli_config_path, 'w') as file:
- file.write('library: beets.db\n')
- file.write('statefile: state')
+ cli_config_path = os.path.join(self.temp_dir, b"config.yaml")
+ with open(cli_config_path, "w") as file:
+ file.write("library: beets.db\n")
+ file.write("statefile: state")
- self.run_command('--config', cli_config_path, 'test', lib=None)
+ self.run_command("--config", cli_config_path, "test", lib=None)
self.assert_equal_path(
- util.bytestring_path(config['library'].as_filename()),
- os.path.join(self.user_config_dir, b'beets.db')
+ util.bytestring_path(config["library"].as_filename()),
+ os.path.join(self.user_config_dir, b"beets.db"),
)
self.assert_equal_path(
- util.bytestring_path(config['statefile'].as_filename()),
- os.path.join(self.user_config_dir, b'state')
+ util.bytestring_path(config["statefile"].as_filename()),
+ os.path.join(self.user_config_dir, b"state"),
)
def test_cli_config_paths_resolve_relative_to_beetsdir(self):
- os.environ['BEETSDIR'] = util.py3_path(self.beetsdir)
+ os.environ["BEETSDIR"] = util.py3_path(self.beetsdir)
- cli_config_path = os.path.join(self.temp_dir, b'config.yaml')
- with open(cli_config_path, 'w') as file:
- file.write('library: beets.db\n')
- file.write('statefile: state')
+ cli_config_path = os.path.join(self.temp_dir, b"config.yaml")
+ with open(cli_config_path, "w") as file:
+ file.write("library: beets.db\n")
+ file.write("statefile: state")
- self.run_command('--config', cli_config_path, 'test', lib=None)
+ self.run_command("--config", cli_config_path, "test", lib=None)
self.assert_equal_path(
- util.bytestring_path(config['library'].as_filename()),
- os.path.join(self.beetsdir, b'beets.db')
+ util.bytestring_path(config["library"].as_filename()),
+ os.path.join(self.beetsdir, b"beets.db"),
)
self.assert_equal_path(
- util.bytestring_path(config['statefile'].as_filename()),
- os.path.join(self.beetsdir, b'state')
+ util.bytestring_path(config["statefile"].as_filename()),
+ os.path.join(self.beetsdir, b"state"),
)
def test_command_line_option_relative_to_working_dir(self):
config.read()
os.chdir(syspath(self.temp_dir))
- self.run_command('--library', 'foo.db', 'test', lib=None)
- self.assert_equal_path(config['library'].as_filename(),
- os.path.join(os.getcwd(), 'foo.db'))
+ self.run_command("--library", "foo.db", "test", lib=None)
+ self.assert_equal_path(
+ config["library"].as_filename(), os.path.join(os.getcwd(), "foo.db")
+ )
def test_cli_config_file_loads_plugin_commands(self):
- cli_config_path = os.path.join(self.temp_dir, b'config.yaml')
- with open(cli_config_path, 'w') as file:
- file.write('pluginpath: %s\n' % _common.PLUGINPATH)
- file.write('plugins: test')
+ cli_config_path = os.path.join(self.temp_dir, b"config.yaml")
+ with open(cli_config_path, "w") as file:
+ file.write("pluginpath: %s\n" % _common.PLUGINPATH)
+ file.write("plugins: test")
- self.run_command('--config', cli_config_path, 'plugin', lib=None)
+ self.run_command("--config", cli_config_path, "plugin", lib=None)
self.assertTrue(plugins.find_plugins()[0].is_test_plugin)
def test_beetsdir_config(self):
- os.environ['BEETSDIR'] = util.py3_path(self.beetsdir)
+ os.environ["BEETSDIR"] = util.py3_path(self.beetsdir)
- env_config_path = os.path.join(self.beetsdir, b'config.yaml')
- with open(env_config_path, 'w') as file:
- file.write('anoption: overwrite')
+ env_config_path = os.path.join(self.beetsdir, b"config.yaml")
+ with open(env_config_path, "w") as file:
+ file.write("anoption: overwrite")
config.read()
- self.assertEqual(config['anoption'].get(), 'overwrite')
+ self.assertEqual(config["anoption"].get(), "overwrite")
def test_beetsdir_points_to_file_error(self):
- beetsdir = os.path.join(self.temp_dir, b'beetsfile')
- open(beetsdir, 'a').close()
- os.environ['BEETSDIR'] = util.py3_path(beetsdir)
- self.assertRaises(ConfigError, self.run_command, 'test')
+ beetsdir = os.path.join(self.temp_dir, b"beetsfile")
+ open(beetsdir, "a").close()
+ os.environ["BEETSDIR"] = util.py3_path(beetsdir)
+ self.assertRaises(ConfigError, self.run_command, "test")
def test_beetsdir_config_does_not_load_default_user_config(self):
- os.environ['BEETSDIR'] = util.py3_path(self.beetsdir)
+ os.environ["BEETSDIR"] = util.py3_path(self.beetsdir)
- with open(self.user_config_path, 'w') as file:
- file.write('anoption: value')
+ with open(self.user_config_path, "w") as file:
+ file.write("anoption: value")
config.read()
- self.assertFalse(config['anoption'].exists())
+ self.assertFalse(config["anoption"].exists())
def test_default_config_paths_resolve_relative_to_beetsdir(self):
- os.environ['BEETSDIR'] = util.py3_path(self.beetsdir)
+ os.environ["BEETSDIR"] = util.py3_path(self.beetsdir)
config.read()
self.assert_equal_path(
- util.bytestring_path(config['library'].as_filename()),
- os.path.join(self.beetsdir, b'library.db')
+ util.bytestring_path(config["library"].as_filename()),
+ os.path.join(self.beetsdir, b"library.db"),
)
self.assert_equal_path(
- util.bytestring_path(config['statefile'].as_filename()),
- os.path.join(self.beetsdir, b'state.pickle')
+ util.bytestring_path(config["statefile"].as_filename()),
+ os.path.join(self.beetsdir, b"state.pickle"),
)
def test_beetsdir_config_paths_resolve_relative_to_beetsdir(self):
- os.environ['BEETSDIR'] = util.py3_path(self.beetsdir)
+ os.environ["BEETSDIR"] = util.py3_path(self.beetsdir)
- env_config_path = os.path.join(self.beetsdir, b'config.yaml')
- with open(env_config_path, 'w') as file:
- file.write('library: beets.db\n')
- file.write('statefile: state')
+ env_config_path = os.path.join(self.beetsdir, b"config.yaml")
+ with open(env_config_path, "w") as file:
+ file.write("library: beets.db\n")
+ file.write("statefile: state")
config.read()
self.assert_equal_path(
- util.bytestring_path(config['library'].as_filename()),
- os.path.join(self.beetsdir, b'beets.db')
+ util.bytestring_path(config["library"].as_filename()),
+ os.path.join(self.beetsdir, b"beets.db"),
)
self.assert_equal_path(
- util.bytestring_path(config['statefile'].as_filename()),
- os.path.join(self.beetsdir, b'state')
+ util.bytestring_path(config["statefile"].as_filename()),
+ os.path.join(self.beetsdir, b"state"),
)
@@ -1132,40 +1156,40 @@ def _show(self, **kwargs):
def test_identical(self):
change, out = self._show()
self.assertFalse(change)
- self.assertEqual(out, '')
+ self.assertEqual(out, "")
def test_string_fixed_field_change(self):
- self.b.title = 'x'
+ self.b.title = "x"
change, out = self._show()
self.assertTrue(change)
- self.assertTrue('title' in out)
+ self.assertTrue("title" in out)
def test_int_fixed_field_change(self):
self.b.track = 9
change, out = self._show()
self.assertTrue(change)
- self.assertTrue('track' in out)
+ self.assertTrue("track" in out)
def test_floats_close_to_identical(self):
self.a.length = 1.00001
self.b.length = 1.00005
change, out = self._show()
self.assertFalse(change)
- self.assertEqual(out, '')
+ self.assertEqual(out, "")
def test_floats_different(self):
self.a.length = 1.00001
self.b.length = 2.00001
change, out = self._show()
self.assertTrue(change)
- self.assertTrue('length' in out)
+ self.assertTrue("length" in out)
def test_both_values_shown(self):
- self.a.title = 'foo'
- self.b.title = 'bar'
+ self.a.title = "foo"
+ self.b.title = "bar"
change, out = self._show()
- self.assertTrue('foo' in out)
- self.assertTrue('bar' in out)
+ self.assertTrue("foo" in out)
+ self.assertTrue("bar" in out)
class ShowChangeTest(_common.TestCase):
@@ -1175,26 +1199,36 @@ def setUp(self):
self.items = [_common.item()]
self.items[0].track = 1
- self.items[0].path = b'/path/to/file.mp3'
+ self.items[0].path = b"/path/to/file.mp3"
self.info = autotag.AlbumInfo(
- album='the album', album_id='album id', artist='the artist',
- artist_id='artist id', tracks=[
- autotag.TrackInfo(title='the title', track_id='track id',
- index=1)
- ]
+ album="the album",
+ album_id="album id",
+ artist="the artist",
+ artist_id="artist id",
+ tracks=[
+ autotag.TrackInfo(
+ title="the title", track_id="track id", index=1
+ )
+ ],
)
- def _show_change(self, items=None, info=None, color=False,
- cur_artist='the artist', cur_album='the album',
- dist=0.1):
+ def _show_change(
+ self,
+ items=None,
+ info=None,
+ color=False,
+ cur_artist="the artist",
+ cur_album="the album",
+ dist=0.1,
+ ):
"""Return an unicode string representing the changes"""
items = items or self.items
info = info or self.info
mapping = dict(zip(items, info.tracks))
- config['ui']['color'] = color
- config['import']['detail'] = True
+ config["ui"]["color"] = color
+ config["import"]["detail"] = True
change_dist = distance(items, info, mapping)
- change_dist._penalties = {'album': [dist], 'artist': [dist]}
+ change_dist._penalties = {"album": [dist], "artist": [dist]}
commands.show_change(
cur_artist,
cur_album,
@@ -1204,41 +1238,40 @@ def _show_change(self, items=None, info=None, color=False,
def test_null_change(self):
msg = self._show_change()
- self.assertTrue('match (90.0%)' in msg)
- self.assertTrue('album, artist' in msg)
+ self.assertTrue("match (90.0%)" in msg)
+ self.assertTrue("album, artist" in msg)
def test_album_data_change(self):
- msg = self._show_change(cur_artist='another artist',
- cur_album='another album')
- self.assertTrue('another artist -> the artist' in msg)
- self.assertTrue('another album -> the album' in msg)
+ msg = self._show_change(
+ cur_artist="another artist", cur_album="another album"
+ )
+ self.assertTrue("another artist -> the artist" in msg)
+ self.assertTrue("another album -> the album" in msg)
def test_item_data_change(self):
- self.items[0].title = 'different'
+ self.items[0].title = "different"
msg = self._show_change()
- self.assertTrue('different' in msg and 'the title' in msg)
+ self.assertTrue("different" in msg and "the title" in msg)
def test_item_data_change_with_unicode(self):
- self.items[0].title = 'caf\xe9'
+ self.items[0].title = "caf\xe9"
msg = self._show_change()
- self.assertTrue(u'caf\xe9' in msg and 'the title' in msg)
+ self.assertTrue("caf\xe9" in msg and "the title" in msg)
def test_album_data_change_with_unicode(self):
- msg = self._show_change(cur_artist=u'caf\xe9',
- cur_album=u'another album')
- self.assertTrue(u'caf\xe9' in msg and 'the artist' in msg)
+ msg = self._show_change(cur_artist="caf\xe9", cur_album="another album")
+ self.assertTrue("caf\xe9" in msg and "the artist" in msg)
def test_item_data_change_title_missing(self):
- self.items[0].title = ''
- msg = re.sub(r' +', ' ', self._show_change())
- self.assertTrue(u'file.mp3' in msg and 'the title' in msg)
+ self.items[0].title = ""
+ msg = re.sub(r" +", " ", self._show_change())
+ self.assertTrue("file.mp3" in msg and "the title" in msg)
def test_item_data_change_title_missing_with_unicode_filename(self):
- self.items[0].title = ''
- self.items[0].path = '/path/to/caf\xe9.mp3'.encode()
- msg = re.sub(r' +', ' ', self._show_change())
- self.assertTrue(u'caf\xe9.mp3' in msg or
- u'caf.mp3' in msg)
+ self.items[0].title = ""
+ self.items[0].path = "/path/to/caf\xe9.mp3".encode()
+ msg = re.sub(r" +", " ", self._show_change())
+ self.assertTrue("caf\xe9.mp3" in msg or "caf.mp3" in msg)
def test_colorize(self):
self.assertEqual("test", ui.uncolorize("test"))
@@ -1265,9 +1298,11 @@ def test_split_into_lines(self):
self.assertEqual(txt, ["test", "test", "test"])
# Test multiple colored texts
colored_text = "\x1b[31mtest \x1b[39;49;00m" * 3
- split_txt = ["\x1b[31mtest\x1b[39;49;00m",
- "\x1b[31mtest\x1b[39;49;00m",
- "\x1b[31mtest\x1b[39;49;00m"]
+ split_txt = [
+ "\x1b[31mtest\x1b[39;49;00m",
+ "\x1b[31mtest\x1b[39;49;00m",
+ "\x1b[31mtest\x1b[39;49;00m",
+ ]
txt = ui.split_into_lines(colored_text, [5, 5, 5])
self.assertEqual(txt, split_txt)
# Test single color, multi space text
@@ -1284,43 +1319,42 @@ def test_split_into_lines(self):
def test_album_data_change_wrap_newline(self):
# Patch ui.term_width to force wrapping
- with patch('beets.ui.commands.ui.term_width', return_value=30):
+ with patch("beets.ui.commands.ui.term_width", return_value=30):
# Test newline layout
- config['ui']['import']['layout'] = u'newline'
- long_name = u'another artist with a' + (u' very' * 10) + \
- u' long name'
- msg = self._show_change(cur_artist=long_name,
- cur_album='another album')
+ config["ui"]["import"]["layout"] = "newline"
+ long_name = "another artist with a" + (" very" * 10) + " long name"
+ msg = self._show_change(
+ cur_artist=long_name, cur_album="another album"
+ )
# _common.log.info("Message:{}".format(msg))
- self.assertTrue('artist: another artist' in msg)
- self.assertTrue(' -> the artist' in msg)
- self.assertFalse('another album -> the album' in msg)
+ self.assertTrue("artist: another artist" in msg)
+ self.assertTrue(" -> the artist" in msg)
+ self.assertFalse("another album -> the album" in msg)
def test_item_data_change_wrap_column(self):
# Patch ui.term_width to force wrapping
- with patch('beets.ui.commands.ui.term_width', return_value=54):
+ with patch("beets.ui.commands.ui.term_width", return_value=54):
# Test Column layout
- config['ui']['import']['layout'] = u'column'
- long_title = u'a track with a' + (u' very' * 10) + \
- u' long name'
+ config["ui"]["import"]["layout"] = "column"
+ long_title = "a track with a" + (" very" * 10) + " long name"
self.items[0].title = long_title
msg = self._show_change()
- self.assertTrue('(#1) a track (1:00) -> (#1) the title (0:00)'
- in msg)
+ self.assertTrue(
+ "(#1) a track (1:00) -> (#1) the title (0:00)" in msg
+ )
def test_item_data_change_wrap_newline(self):
# Patch ui.term_width to force wrapping
- with patch('beets.ui.commands.ui.term_width', return_value=30):
- config['ui']['import']['layout'] = u'newline'
- long_title = u'a track with a' + (u' very' * 10) + \
- u' long name'
+ with patch("beets.ui.commands.ui.term_width", return_value=30):
+ config["ui"]["import"]["layout"] = "newline"
+ long_title = "a track with a" + (" very" * 10) + " long name"
self.items[0].title = long_title
msg = self._show_change()
- self.assertTrue('(#1) a track with' in msg)
- self.assertTrue(' -> (#1) the title (0:00)' in msg)
+ self.assertTrue("(#1) a track with" in msg)
+ self.assertTrue(" -> (#1) the title (0:00)" in msg)
-@patch('beets.library.Item.try_filesize', Mock(return_value=987))
+@patch("beets.library.Item.try_filesize", Mock(return_value=987))
class SummarizeItemsTest(_common.TestCase):
def setUp(self):
super().setUp()
@@ -1362,41 +1396,42 @@ class PathFormatTest(_common.TestCase):
def test_custom_paths_prepend(self):
default_formats = ui.get_path_formats()
- config['paths'] = {'foo': 'bar'}
+ config["paths"] = {"foo": "bar"}
pf = ui.get_path_formats()
key, tmpl = pf[0]
- self.assertEqual(key, 'foo')
- self.assertEqual(tmpl.original, 'bar')
+ self.assertEqual(key, "foo")
+ self.assertEqual(tmpl.original, "bar")
self.assertEqual(pf[1:], default_formats)
@_common.slow_test()
class PluginTest(_common.TestCase, TestHelper):
def test_plugin_command_from_pluginpath(self):
- config['pluginpath'] = [_common.PLUGINPATH]
- config['plugins'] = ['test']
- self.run_command('test', lib=None)
+ config["pluginpath"] = [_common.PLUGINPATH]
+ config["plugins"] = ["test"]
+ self.run_command("test", lib=None)
@_common.slow_test()
class CompletionTest(_common.TestCase, TestHelper):
def test_completion(self):
# Load plugin commands
- config['pluginpath'] = [_common.PLUGINPATH]
- config['plugins'] = ['test']
+ config["pluginpath"] = [_common.PLUGINPATH]
+ config["plugins"] = ["test"]
# Do not load any other bash completion scripts on the system.
env = dict(os.environ)
- env['BASH_COMPLETION_DIR'] = os.devnull
- env['BASH_COMPLETION_COMPAT_DIR'] = os.devnull
+ env["BASH_COMPLETION_DIR"] = os.devnull
+ env["BASH_COMPLETION_COMPAT_DIR"] = os.devnull
# Open a `bash` process to run the tests in. We'll pipe in bash
# commands via stdin.
- cmd = os.environ.get('BEETS_TEST_SHELL', '/bin/bash --norc').split()
+ cmd = os.environ.get("BEETS_TEST_SHELL", "/bin/bash --norc").split()
if not has_program(cmd[0]):
- self.skipTest('bash not available')
- tester = subprocess.Popen(cmd, stdin=subprocess.PIPE,
- stdout=subprocess.PIPE, env=env)
+ self.skipTest("bash not available")
+ tester = subprocess.Popen(
+ cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, env=env
+ )
# Load bash_completion library.
for path in commands.BASH_COMPLETION_PATHS:
@@ -1404,38 +1439,39 @@ def test_completion(self):
bash_completion = path
break
else:
- self.skipTest('bash-completion script not found')
+ self.skipTest("bash-completion script not found")
try:
- with open(util.syspath(bash_completion), 'rb') as f:
+ with open(util.syspath(bash_completion), "rb") as f:
tester.stdin.writelines(f)
except OSError:
- self.skipTest('could not read bash-completion script')
+ self.skipTest("could not read bash-completion script")
# Load completion script.
self.io.install()
- self.run_command('completion', lib=None)
- completion_script = self.io.getoutput().encode('utf-8')
+ self.run_command("completion", lib=None)
+ completion_script = self.io.getoutput().encode("utf-8")
self.io.restore()
tester.stdin.writelines(completion_script.splitlines(True))
# Load test suite.
- test_script_name = os.path.join(_common.RSRC, b'test_completion.sh')
- with open(test_script_name, 'rb') as test_script_file:
+ test_script_name = os.path.join(_common.RSRC, b"test_completion.sh")
+ with open(test_script_name, "rb") as test_script_file:
tester.stdin.writelines(test_script_file)
out, err = tester.communicate()
- if tester.returncode != 0 or out != b'completion tests passed\n':
- print(out.decode('utf-8'))
- self.fail('test/test_completion.sh did not execute properly')
+ if tester.returncode != 0 or out != b"completion tests passed\n":
+ print(out.decode("utf-8"))
+ self.fail("test/test_completion.sh did not execute properly")
class CommonOptionsParserCliTest(unittest.TestCase, TestHelper):
"""Test CommonOptionsParser and formatting LibModel formatting on 'list'
command.
"""
+
def setUp(self):
self.setup_beets()
self.item = _common.item()
- self.item.path = b'xxx/yyy'
+ self.item.path = b"xxx/yyy"
self.lib.add(self.item)
self.lib.add_album([self.item])
self.load_plugins()
@@ -1445,63 +1481,65 @@ def tearDown(self):
self.teardown_beets()
def test_base(self):
- l = self.run_with_output('ls')
- self.assertEqual(l, 'the artist - the album - the title\n')
+ l = self.run_with_output("ls")
+ self.assertEqual(l, "the artist - the album - the title\n")
- l = self.run_with_output('ls', '-a')
- self.assertEqual(l, 'the album artist - the album\n')
+ l = self.run_with_output("ls", "-a")
+ self.assertEqual(l, "the album artist - the album\n")
def test_path_option(self):
- l = self.run_with_output('ls', '-p')
- self.assertEqual(l, 'xxx/yyy\n')
+ l = self.run_with_output("ls", "-p")
+ self.assertEqual(l, "xxx/yyy\n")
- l = self.run_with_output('ls', '-a', '-p')
- self.assertEqual(l, 'xxx\n')
+ l = self.run_with_output("ls", "-a", "-p")
+ self.assertEqual(l, "xxx\n")
def test_format_option(self):
- l = self.run_with_output('ls', '-f', '$artist')
- self.assertEqual(l, 'the artist\n')
+ l = self.run_with_output("ls", "-f", "$artist")
+ self.assertEqual(l, "the artist\n")
- l = self.run_with_output('ls', '-a', '-f', '$albumartist')
- self.assertEqual(l, 'the album artist\n')
+ l = self.run_with_output("ls", "-a", "-f", "$albumartist")
+ self.assertEqual(l, "the album artist\n")
def test_format_option_unicode(self):
- l = self.run_with_output(b'ls', b'-f',
- 'caf\xe9'.encode(util.arg_encoding()))
- self.assertEqual(l, 'caf\xe9\n')
+ l = self.run_with_output(
+ b"ls", b"-f", "caf\xe9".encode(util.arg_encoding())
+ )
+ self.assertEqual(l, "caf\xe9\n")
def test_root_format_option(self):
- l = self.run_with_output('--format-item', '$artist',
- '--format-album', 'foo', 'ls')
- self.assertEqual(l, 'the artist\n')
+ l = self.run_with_output(
+ "--format-item", "$artist", "--format-album", "foo", "ls"
+ )
+ self.assertEqual(l, "the artist\n")
- l = self.run_with_output('--format-item', 'foo',
- '--format-album', '$albumartist',
- 'ls', '-a')
- self.assertEqual(l, 'the album artist\n')
+ l = self.run_with_output(
+ "--format-item", "foo", "--format-album", "$albumartist", "ls", "-a"
+ )
+ self.assertEqual(l, "the album artist\n")
def test_help(self):
- l = self.run_with_output('help')
- self.assertIn('Usage:', l)
+ l = self.run_with_output("help")
+ self.assertIn("Usage:", l)
- l = self.run_with_output('help', 'list')
- self.assertIn('Usage:', l)
+ l = self.run_with_output("help", "list")
+ self.assertIn("Usage:", l)
with self.assertRaises(ui.UserError):
- self.run_command('help', 'this.is.not.a.real.command')
+ self.run_command("help", "this.is.not.a.real.command")
def test_stats(self):
- l = self.run_with_output('stats')
- self.assertIn('Approximate total size:', l)
+ l = self.run_with_output("stats")
+ self.assertIn("Approximate total size:", l)
# # Need to have more realistic library setup for this to work
# l = self.run_with_output('stats', '-e')
# self.assertIn('Total size:', l)
def test_version(self):
- l = self.run_with_output('version')
- self.assertIn('Python version', l)
- self.assertIn('no plugins loaded', l)
+ l = self.run_with_output("version")
+ self.assertIn("Python version", l)
+ self.assertIn("no plugins loaded", l)
# # Need to have plugin loaded
# l = self.run_with_output('version')
@@ -1521,85 +1559,92 @@ def test_album_option(self):
parser.add_album_option()
self.assertTrue(bool(parser._album_flags))
- self.assertEqual(parser.parse_args([]), ({'album': None}, []))
- self.assertEqual(parser.parse_args(['-a']), ({'album': True}, []))
- self.assertEqual(parser.parse_args(['--album']),
- ({'album': True}, []))
+ self.assertEqual(parser.parse_args([]), ({"album": None}, []))
+ self.assertEqual(parser.parse_args(["-a"]), ({"album": True}, []))
+ self.assertEqual(parser.parse_args(["--album"]), ({"album": True}, []))
def test_path_option(self):
parser = ui.CommonOptionsParser()
parser.add_path_option()
self.assertFalse(parser._album_flags)
- config['format_item'].set('$foo')
- self.assertEqual(parser.parse_args([]), ({'path': None}, []))
- self.assertEqual(config['format_item'].as_str(), '$foo')
+ config["format_item"].set("$foo")
+ self.assertEqual(parser.parse_args([]), ({"path": None}, []))
+ self.assertEqual(config["format_item"].as_str(), "$foo")
- self.assertEqual(parser.parse_args(['-p']),
- ({'path': True, 'format': '$path'}, []))
- self.assertEqual(parser.parse_args(['--path']),
- ({'path': True, 'format': '$path'}, []))
+ self.assertEqual(
+ parser.parse_args(["-p"]), ({"path": True, "format": "$path"}, [])
+ )
+ self.assertEqual(
+ parser.parse_args(["--path"]),
+ ({"path": True, "format": "$path"}, []),
+ )
- self.assertEqual(config['format_item'].as_str(), '$path')
- self.assertEqual(config['format_album'].as_str(), '$path')
+ self.assertEqual(config["format_item"].as_str(), "$path")
+ self.assertEqual(config["format_album"].as_str(), "$path")
def test_format_option(self):
parser = ui.CommonOptionsParser()
parser.add_format_option()
self.assertFalse(parser._album_flags)
- config['format_item'].set('$foo')
- self.assertEqual(parser.parse_args([]), ({'format': None}, []))
- self.assertEqual(config['format_item'].as_str(), '$foo')
+ config["format_item"].set("$foo")
+ self.assertEqual(parser.parse_args([]), ({"format": None}, []))
+ self.assertEqual(config["format_item"].as_str(), "$foo")
- self.assertEqual(parser.parse_args(['-f', '$bar']),
- ({'format': '$bar'}, []))
- self.assertEqual(parser.parse_args(['--format', '$baz']),
- ({'format': '$baz'}, []))
+ self.assertEqual(
+ parser.parse_args(["-f", "$bar"]), ({"format": "$bar"}, [])
+ )
+ self.assertEqual(
+ parser.parse_args(["--format", "$baz"]), ({"format": "$baz"}, [])
+ )
- self.assertEqual(config['format_item'].as_str(), '$baz')
- self.assertEqual(config['format_album'].as_str(), '$baz')
+ self.assertEqual(config["format_item"].as_str(), "$baz")
+ self.assertEqual(config["format_album"].as_str(), "$baz")
def test_format_option_with_target(self):
with self.assertRaises(KeyError):
- ui.CommonOptionsParser().add_format_option(target='thingy')
+ ui.CommonOptionsParser().add_format_option(target="thingy")
parser = ui.CommonOptionsParser()
- parser.add_format_option(target='item')
+ parser.add_format_option(target="item")
- config['format_item'].set('$item')
- config['format_album'].set('$album')
+ config["format_item"].set("$item")
+ config["format_album"].set("$album")
- self.assertEqual(parser.parse_args(['-f', '$bar']),
- ({'format': '$bar'}, []))
+ self.assertEqual(
+ parser.parse_args(["-f", "$bar"]), ({"format": "$bar"}, [])
+ )
- self.assertEqual(config['format_item'].as_str(), '$bar')
- self.assertEqual(config['format_album'].as_str(), '$album')
+ self.assertEqual(config["format_item"].as_str(), "$bar")
+ self.assertEqual(config["format_album"].as_str(), "$album")
def test_format_option_with_album(self):
parser = ui.CommonOptionsParser()
parser.add_album_option()
parser.add_format_option()
- config['format_item'].set('$item')
- config['format_album'].set('$album')
+ config["format_item"].set("$item")
+ config["format_album"].set("$album")
- parser.parse_args(['-f', '$bar'])
- self.assertEqual(config['format_item'].as_str(), '$bar')
- self.assertEqual(config['format_album'].as_str(), '$album')
+ parser.parse_args(["-f", "$bar"])
+ self.assertEqual(config["format_item"].as_str(), "$bar")
+ self.assertEqual(config["format_album"].as_str(), "$album")
- parser.parse_args(['-a', '-f', '$foo'])
- self.assertEqual(config['format_item'].as_str(), '$bar')
- self.assertEqual(config['format_album'].as_str(), '$foo')
+ parser.parse_args(["-a", "-f", "$foo"])
+ self.assertEqual(config["format_item"].as_str(), "$bar")
+ self.assertEqual(config["format_album"].as_str(), "$foo")
- parser.parse_args(['-f', '$foo2', '-a'])
- self.assertEqual(config['format_album'].as_str(), '$foo2')
+ parser.parse_args(["-f", "$foo2", "-a"])
+ self.assertEqual(config["format_album"].as_str(), "$foo2")
def test_add_all_common_options(self):
parser = ui.CommonOptionsParser()
parser.add_all_common_options()
- self.assertEqual(parser.parse_args([]),
- ({'album': None, 'path': None, 'format': None}, []))
+ self.assertEqual(
+ parser.parse_args([]),
+ ({"album": None, "path": None, "format": None}, []),
+ )
class EncodingTest(_common.TestCase):
@@ -1608,26 +1653,27 @@ class EncodingTest(_common.TestCase):
"""
def out_encoding_overridden(self):
- config['terminal_encoding'] = 'fake_encoding'
- self.assertEqual(ui._out_encoding(), 'fake_encoding')
+ config["terminal_encoding"] = "fake_encoding"
+ self.assertEqual(ui._out_encoding(), "fake_encoding")
def in_encoding_overridden(self):
- config['terminal_encoding'] = 'fake_encoding'
- self.assertEqual(ui._in_encoding(), 'fake_encoding')
+ config["terminal_encoding"] = "fake_encoding"
+ self.assertEqual(ui._in_encoding(), "fake_encoding")
def out_encoding_default_utf8(self):
- with patch('sys.stdout') as stdout:
+ with patch("sys.stdout") as stdout:
stdout.encoding = None
- self.assertEqual(ui._out_encoding(), 'utf-8')
+ self.assertEqual(ui._out_encoding(), "utf-8")
def in_encoding_default_utf8(self):
- with patch('sys.stdin') as stdin:
+ with patch("sys.stdin") as stdin:
stdin.encoding = None
- self.assertEqual(ui._in_encoding(), 'utf-8')
+ self.assertEqual(ui._in_encoding(), "utf-8")
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/test_ui_commands.py b/test/test_ui_commands.py
index 36b380b6f6..3c53b93a3f 100644
--- a/test/test_ui_commands.py
+++ b/test/test_ui_commands.py
@@ -19,11 +19,9 @@
import os
import shutil
import unittest
-
from test import _common
-from beets import library
-from beets import ui
+from beets import library, ui
from beets.ui import commands
from beets.util import syspath
@@ -32,16 +30,16 @@ class QueryTest(_common.TestCase):
def setUp(self):
super().setUp()
- self.libdir = os.path.join(self.temp_dir, b'testlibdir')
+ self.libdir = os.path.join(self.temp_dir, b"testlibdir")
os.mkdir(syspath(self.libdir))
# Add a file to the library but don't copy it in yet.
- self.lib = library.Library(':memory:', self.libdir)
+ self.lib = library.Library(":memory:", self.libdir)
# Alternate destination directory.
- self.otherdir = os.path.join(self.temp_dir, b'testotherdir')
+ self.otherdir = os.path.join(self.temp_dir, b"testotherdir")
- def add_item(self, filename=b'srcfile', templatefile=b'full.mp3'):
+ def add_item(self, filename=b"srcfile", templatefile=b"full.mp3"):
itempath = os.path.join(self.libdir, filename)
shutil.copy(
syspath(os.path.join(_common.RSRC, templatefile)),
@@ -55,10 +53,10 @@ def add_album(self, items):
album = self.lib.add_album(items)
return album
- def check_do_query(self, num_items, num_albums,
- q=(), album=False, also_items=True):
- items, albums = commands._do_query(
- self.lib, q, album, also_items)
+ def check_do_query(
+ self, num_items, num_albums, q=(), album=False, also_items=True
+ ):
+ items, albums = commands._do_query(self.lib, q, album, also_items)
self.assertEqual(len(items), num_items)
self.assertEqual(len(albums), num_albums)
@@ -121,5 +119,6 @@ def test_fields_func(self):
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/test_ui_importer.py b/test/test_ui_importer.py
index 88daa0ce18..0b8be163a6 100644
--- a/test/test_ui_importer.py
+++ b/test/test_ui_importer.py
@@ -19,18 +19,16 @@
"""
import unittest
-
-from test._common import DummyIO
from test import test_importer
+from test._common import DummyIO
+
+from beets import config, importer
from beets.ui.commands import TerminalImportSession
-from beets import importer
-from beets import config
class TerminalImportSessionFixture(TerminalImportSession):
-
def __init__(self, *args, **kwargs):
- self.io = kwargs.pop('io')
+ self.io = kwargs.pop("io")
super().__init__(*args, **kwargs)
self._choices = []
@@ -57,95 +55,112 @@ def _add_choice_input(self):
choice = self.default_choice
if choice == importer.action.APPLY:
- self.io.addinput('A')
+ self.io.addinput("A")
elif choice == importer.action.ASIS:
- self.io.addinput('U')
+ self.io.addinput("U")
elif choice == importer.action.ALBUMS:
- self.io.addinput('G')
+ self.io.addinput("G")
elif choice == importer.action.TRACKS:
- self.io.addinput('T')
+ self.io.addinput("T")
elif choice == importer.action.SKIP:
- self.io.addinput('S')
+ self.io.addinput("S")
elif isinstance(choice, int):
- self.io.addinput('M')
+ self.io.addinput("M")
self.io.addinput(str(choice))
self._add_choice_input()
else:
- raise Exception('Unknown choice %s' % choice)
+ raise Exception("Unknown choice %s" % choice)
class TerminalImportSessionSetup:
- """Overwrites test_importer.ImportHelper to provide a terminal importer
- """
-
- def _setup_import_session(self, import_dir=None, delete=False,
- threaded=False, copy=True, singletons=False,
- move=False, autotag=True):
- config['import']['copy'] = copy
- config['import']['delete'] = delete
- config['import']['timid'] = True
- config['threaded'] = False
- config['import']['singletons'] = singletons
- config['import']['move'] = move
- config['import']['autotag'] = autotag
- config['import']['resume'] = False
-
- if not hasattr(self, 'io'):
+ """Overwrites test_importer.ImportHelper to provide a terminal importer"""
+
+ def _setup_import_session(
+ self,
+ import_dir=None,
+ delete=False,
+ threaded=False,
+ copy=True,
+ singletons=False,
+ move=False,
+ autotag=True,
+ ):
+ config["import"]["copy"] = copy
+ config["import"]["delete"] = delete
+ config["import"]["timid"] = True
+ config["threaded"] = False
+ config["import"]["singletons"] = singletons
+ config["import"]["move"] = move
+ config["import"]["autotag"] = autotag
+ config["import"]["resume"] = False
+
+ if not hasattr(self, "io"):
self.io = DummyIO()
self.io.install()
self.importer = TerminalImportSessionFixture(
- self.lib, loghandler=None, query=None, io=self.io,
+ self.lib,
+ loghandler=None,
+ query=None,
+ io=self.io,
paths=[import_dir or self.import_dir],
)
-class NonAutotaggedImportTest(TerminalImportSessionSetup,
- test_importer.NonAutotaggedImportTest):
+class NonAutotaggedImportTest(
+ TerminalImportSessionSetup, test_importer.NonAutotaggedImportTest
+):
pass
-class ImportTest(TerminalImportSessionSetup,
- test_importer.ImportTest):
+class ImportTest(TerminalImportSessionSetup, test_importer.ImportTest):
pass
-class ImportSingletonTest(TerminalImportSessionSetup,
- test_importer.ImportSingletonTest):
+class ImportSingletonTest(
+ TerminalImportSessionSetup, test_importer.ImportSingletonTest
+):
pass
-class ImportTracksTest(TerminalImportSessionSetup,
- test_importer.ImportTracksTest):
+class ImportTracksTest(
+ TerminalImportSessionSetup, test_importer.ImportTracksTest
+):
pass
-class ImportCompilationTest(TerminalImportSessionSetup,
- test_importer.ImportCompilationTest):
+class ImportCompilationTest(
+ TerminalImportSessionSetup, test_importer.ImportCompilationTest
+):
pass
-class ImportExistingTest(TerminalImportSessionSetup,
- test_importer.ImportExistingTest):
+class ImportExistingTest(
+ TerminalImportSessionSetup, test_importer.ImportExistingTest
+):
pass
-class ChooseCandidateTest(TerminalImportSessionSetup,
- test_importer.ChooseCandidateTest):
+class ChooseCandidateTest(
+ TerminalImportSessionSetup, test_importer.ChooseCandidateTest
+):
pass
-class GroupAlbumsImportTest(TerminalImportSessionSetup,
- test_importer.GroupAlbumsImportTest):
+class GroupAlbumsImportTest(
+ TerminalImportSessionSetup, test_importer.GroupAlbumsImportTest
+):
pass
-class GlobalGroupAlbumsImportTest(TerminalImportSessionSetup,
- test_importer.GlobalGroupAlbumsImportTest):
+class GlobalGroupAlbumsImportTest(
+ TerminalImportSessionSetup, test_importer.GlobalGroupAlbumsImportTest
+):
pass
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/test_ui_init.py b/test/test_ui_init.py
index 4cee89d411..70ed3e2290 100644
--- a/test/test_ui_init.py
+++ b/test/test_ui_init.py
@@ -18,13 +18,12 @@
import os
import shutil
import unittest
-from random import random
from copy import deepcopy
-
-from beets import ui
+from random import random
from test import _common
from test.helper import control_stdin
-from beets import config
+
+from beets import config, ui
class InputMethodsTest(_common.TestCase):
@@ -39,52 +38,56 @@ def _print_helper2(self, s, prefix):
print(prefix, s)
def test_input_select_objects(self):
- full_items = ['1', '2', '3', '4', '5']
+ full_items = ["1", "2", "3", "4", "5"]
# Test no
- self.io.addinput('n')
+ self.io.addinput("n")
items = ui.input_select_objects(
- "Prompt", full_items, self._print_helper)
+ "Prompt", full_items, self._print_helper
+ )
self.assertEqual(items, [])
# Test yes
- self.io.addinput('y')
+ self.io.addinput("y")
items = ui.input_select_objects(
- "Prompt", full_items, self._print_helper)
+ "Prompt", full_items, self._print_helper
+ )
self.assertEqual(items, full_items)
# Test selective 1
- self.io.addinput('s')
- self.io.addinput('n')
- self.io.addinput('y')
- self.io.addinput('n')
- self.io.addinput('y')
- self.io.addinput('n')
+ self.io.addinput("s")
+ self.io.addinput("n")
+ self.io.addinput("y")
+ self.io.addinput("n")
+ self.io.addinput("y")
+ self.io.addinput("n")
items = ui.input_select_objects(
- "Prompt", full_items, self._print_helper)
- self.assertEqual(items, ['2', '4'])
+ "Prompt", full_items, self._print_helper
+ )
+ self.assertEqual(items, ["2", "4"])
# Test selective 2
- self.io.addinput('s')
- self.io.addinput('y')
- self.io.addinput('y')
- self.io.addinput('n')
- self.io.addinput('y')
- self.io.addinput('n')
+ self.io.addinput("s")
+ self.io.addinput("y")
+ self.io.addinput("y")
+ self.io.addinput("n")
+ self.io.addinput("y")
+ self.io.addinput("n")
items = ui.input_select_objects(
- "Prompt", full_items,
- lambda s: self._print_helper2(s, "Prefix"))
- self.assertEqual(items, ['1', '2', '4'])
+ "Prompt", full_items, lambda s: self._print_helper2(s, "Prefix")
+ )
+ self.assertEqual(items, ["1", "2", "4"])
# Test selective 3
- self.io.addinput('s')
- self.io.addinput('y')
- self.io.addinput('n')
- self.io.addinput('y')
- self.io.addinput('q')
+ self.io.addinput("s")
+ self.io.addinput("y")
+ self.io.addinput("n")
+ self.io.addinput("y")
+ self.io.addinput("q")
items = ui.input_select_objects(
- "Prompt", full_items, self._print_helper)
- self.assertEqual(items, ['1', '3'])
+ "Prompt", full_items, self._print_helper
+ )
+ self.assertEqual(items, ["1", "3"])
class InitTest(_common.LibTestCase):
@@ -93,34 +96,34 @@ def setUp(self):
def test_human_bytes(self):
tests = [
- (0, '0.0 B'),
- (30, '30.0 B'),
- (pow(2, 10), '1.0 KiB'),
- (pow(2, 20), '1.0 MiB'),
- (pow(2, 30), '1.0 GiB'),
- (pow(2, 40), '1.0 TiB'),
- (pow(2, 50), '1.0 PiB'),
- (pow(2, 60), '1.0 EiB'),
- (pow(2, 70), '1.0 ZiB'),
- (pow(2, 80), '1.0 YiB'),
- (pow(2, 90), '1.0 HiB'),
- (pow(2, 100), 'big'),
+ (0, "0.0 B"),
+ (30, "30.0 B"),
+ (pow(2, 10), "1.0 KiB"),
+ (pow(2, 20), "1.0 MiB"),
+ (pow(2, 30), "1.0 GiB"),
+ (pow(2, 40), "1.0 TiB"),
+ (pow(2, 50), "1.0 PiB"),
+ (pow(2, 60), "1.0 EiB"),
+ (pow(2, 70), "1.0 ZiB"),
+ (pow(2, 80), "1.0 YiB"),
+ (pow(2, 90), "1.0 HiB"),
+ (pow(2, 100), "big"),
]
for i, h in tests:
self.assertEqual(h, ui.human_bytes(i))
def test_human_seconds(self):
tests = [
- (0, '0.0 seconds'),
- (30, '30.0 seconds'),
- (60, '1.0 minutes'),
- (90, '1.5 minutes'),
- (125, '2.1 minutes'),
- (3600, '1.0 hours'),
- (86400, '1.0 days'),
- (604800, '1.0 weeks'),
- (31449600, '1.0 years'),
- (314496000, '1.0 decades'),
+ (0, "0.0 seconds"),
+ (30, "30.0 seconds"),
+ (60, "1.0 minutes"),
+ (90, "1.5 minutes"),
+ (125, "2.1 minutes"),
+ (3600, "1.0 hours"),
+ (86400, "1.0 days"),
+ (604800, "1.0 weeks"),
+ (31449600, "1.0 years"),
+ (314496000, "1.0 decades"),
]
for i, h in tests:
self.assertEqual(h, ui.human_seconds(i))
@@ -128,25 +131,28 @@ def test_human_seconds(self):
class ParentalDirCreation(_common.TestCase):
def test_create_yes(self):
- non_exist_path = _common.util.py3_path(os.path.join(
- self.temp_dir, b'nonexist', str(random()).encode()))
+ non_exist_path = _common.util.py3_path(
+ os.path.join(self.temp_dir, b"nonexist", str(random()).encode())
+ )
# Deepcopy instead of recovering because exceptions might
# occur; wish I can use a golang defer here.
test_config = deepcopy(config)
- test_config['library'] = non_exist_path
- with control_stdin('y'):
+ test_config["library"] = non_exist_path
+ with control_stdin("y"):
lib = ui._open_library(test_config)
lib._close()
def test_create_no(self):
non_exist_path_parent = _common.util.py3_path(
- os.path.join(self.temp_dir, b'nonexist'))
- non_exist_path = _common.util.py3_path(os.path.join(
- non_exist_path_parent.encode(), str(random()).encode()))
+ os.path.join(self.temp_dir, b"nonexist")
+ )
+ non_exist_path = _common.util.py3_path(
+ os.path.join(non_exist_path_parent.encode(), str(random()).encode())
+ )
test_config = deepcopy(config)
- test_config['library'] = non_exist_path
+ test_config["library"] = non_exist_path
- with control_stdin('n'):
+ with control_stdin("n"):
try:
lib = ui._open_library(test_config)
except ui.UserError:
@@ -163,5 +169,5 @@ def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/test_util.py b/test/test_util.py
index 8c16243a5b..26da2bf89f 100644
--- a/test/test_util.py
+++ b/test/test_util.py
@@ -20,126 +20,137 @@
import subprocess
import sys
import unittest
-
-from unittest.mock import patch, Mock
-
from test import _common
+from unittest.mock import Mock, patch
+
from beets import util
class UtilTest(unittest.TestCase):
def test_open_anything(self):
- with _common.system_mock('Windows'):
- self.assertEqual(util.open_anything(), 'start')
+ with _common.system_mock("Windows"):
+ self.assertEqual(util.open_anything(), "start")
- with _common.system_mock('Darwin'):
- self.assertEqual(util.open_anything(), 'open')
+ with _common.system_mock("Darwin"):
+ self.assertEqual(util.open_anything(), "open")
- with _common.system_mock('Tagada'):
- self.assertEqual(util.open_anything(), 'xdg-open')
+ with _common.system_mock("Tagada"):
+ self.assertEqual(util.open_anything(), "xdg-open")
- @patch('os.execlp')
- @patch('beets.util.open_anything')
+ @patch("os.execlp")
+ @patch("beets.util.open_anything")
def test_interactive_open(self, mock_open, mock_execlp):
- mock_open.return_value = 'tagada'
- util.interactive_open(['foo'], util.open_anything())
- mock_execlp.assert_called_once_with('tagada', 'tagada', 'foo')
+ mock_open.return_value = "tagada"
+ util.interactive_open(["foo"], util.open_anything())
+ mock_execlp.assert_called_once_with("tagada", "tagada", "foo")
mock_execlp.reset_mock()
- util.interactive_open(['foo'], 'bar')
- mock_execlp.assert_called_once_with('bar', 'bar', 'foo')
+ util.interactive_open(["foo"], "bar")
+ mock_execlp.assert_called_once_with("bar", "bar", "foo")
def test_sanitize_unix_replaces_leading_dot(self):
with _common.platform_posix():
- p = util.sanitize_path('one/.two/three')
- self.assertFalse('.' in p)
+ p = util.sanitize_path("one/.two/three")
+ self.assertFalse("." in p)
def test_sanitize_windows_replaces_trailing_dot(self):
with _common.platform_windows():
- p = util.sanitize_path('one/two./three')
- self.assertFalse('.' in p)
+ p = util.sanitize_path("one/two./three")
+ self.assertFalse("." in p)
def test_sanitize_windows_replaces_illegal_chars(self):
with _common.platform_windows():
p = util.sanitize_path(':*?"<>|')
- self.assertFalse(':' in p)
- self.assertFalse('*' in p)
- self.assertFalse('?' in p)
+ self.assertFalse(":" in p)
+ self.assertFalse("*" in p)
+ self.assertFalse("?" in p)
self.assertFalse('"' in p)
- self.assertFalse('<' in p)
- self.assertFalse('>' in p)
- self.assertFalse('|' in p)
+ self.assertFalse("<" in p)
+ self.assertFalse(">" in p)
+ self.assertFalse("|" in p)
def test_sanitize_windows_replaces_trailing_space(self):
with _common.platform_windows():
- p = util.sanitize_path('one/two /three')
- self.assertFalse(' ' in p)
+ p = util.sanitize_path("one/two /three")
+ self.assertFalse(" " in p)
def test_sanitize_path_works_on_empty_string(self):
with _common.platform_posix():
- p = util.sanitize_path('')
- self.assertEqual(p, '')
+ p = util.sanitize_path("")
+ self.assertEqual(p, "")
def test_sanitize_with_custom_replace_overrides_built_in_sub(self):
with _common.platform_posix():
- p = util.sanitize_path('a/.?/b', [
- (re.compile(r'foo'), 'bar'),
- ])
- self.assertEqual(p, 'a/.?/b')
+ p = util.sanitize_path(
+ "a/.?/b",
+ [
+ (re.compile(r"foo"), "bar"),
+ ],
+ )
+ self.assertEqual(p, "a/.?/b")
def test_sanitize_with_custom_replace_adds_replacements(self):
with _common.platform_posix():
- p = util.sanitize_path('foo/bar', [
- (re.compile(r'foo'), 'bar'),
- ])
- self.assertEqual(p, 'bar/bar')
-
- @unittest.skip('unimplemented: #359')
+ p = util.sanitize_path(
+ "foo/bar",
+ [
+ (re.compile(r"foo"), "bar"),
+ ],
+ )
+ self.assertEqual(p, "bar/bar")
+
+ @unittest.skip("unimplemented: #359")
def test_sanitize_empty_component(self):
with _common.platform_posix():
- p = util.sanitize_path('foo//bar', [
- (re.compile(r'^$'), '_'),
- ])
- self.assertEqual(p, 'foo/_/bar')
-
- @unittest.skipIf(sys.platform == 'win32', 'win32')
+ p = util.sanitize_path(
+ "foo//bar",
+ [
+ (re.compile(r"^$"), "_"),
+ ],
+ )
+ self.assertEqual(p, "foo/_/bar")
+
+ @unittest.skipIf(sys.platform == "win32", "win32")
def test_convert_command_args_keeps_undecodeable_bytes(self):
- arg = b'\x82' # non-ascii bytes
+ arg = b"\x82" # non-ascii bytes
cmd_args = util.convert_command_args([arg])
- self.assertEqual(cmd_args[0],
- arg.decode(util.arg_encoding(), 'surrogateescape'))
+ self.assertEqual(
+ cmd_args[0], arg.decode(util.arg_encoding(), "surrogateescape")
+ )
- @patch('beets.util.subprocess.Popen')
+ @patch("beets.util.subprocess.Popen")
def test_command_output(self, mock_popen):
def popen_fail(*args, **kwargs):
m = Mock(returncode=1)
- m.communicate.return_value = 'foo', 'bar'
+ m.communicate.return_value = "foo", "bar"
return m
mock_popen.side_effect = popen_fail
with self.assertRaises(subprocess.CalledProcessError) as exc_context:
- util.command_output(['taga', '\xc3\xa9'])
+ util.command_output(["taga", "\xc3\xa9"])
self.assertEqual(exc_context.exception.returncode, 1)
- self.assertEqual(exc_context.exception.cmd, 'taga \xc3\xa9')
+ self.assertEqual(exc_context.exception.cmd, "taga \xc3\xa9")
def test_case_sensitive_default(self):
- path = util.bytestring_path(util.normpath(
- "/this/path/does/not/exist",
- ))
+ path = util.bytestring_path(
+ util.normpath(
+ "/this/path/does/not/exist",
+ )
+ )
self.assertEqual(
util.case_sensitive(path),
- platform.system() != 'Windows',
+ platform.system() != "Windows",
)
- @unittest.skipIf(sys.platform == 'win32', 'fs is not case sensitive')
+ @unittest.skipIf(sys.platform == "win32", "fs is not case sensitive")
def test_case_sensitive_detects_sensitive(self):
# FIXME: Add tests for more code paths of case_sensitive()
# when the filesystem on the test runner is not case sensitive
pass
- @unittest.skipIf(sys.platform != 'win32', 'fs is case sensitive')
+ @unittest.skipIf(sys.platform != "win32", "fs is case sensitive")
def test_case_sensitive_detects_insensitive(self):
# FIXME: Add tests for more code paths of case_sensitive()
# when the filesystem on the test runner is case sensitive
@@ -149,29 +160,29 @@ def test_case_sensitive_detects_insensitive(self):
class PathConversionTest(_common.TestCase):
def test_syspath_windows_format(self):
with _common.platform_windows():
- path = os.path.join('a', 'b', 'c')
+ path = os.path.join("a", "b", "c")
outpath = util.syspath(path)
self.assertTrue(isinstance(outpath, str))
- self.assertTrue(outpath.startswith('\\\\?\\'))
+ self.assertTrue(outpath.startswith("\\\\?\\"))
def test_syspath_windows_format_unc_path(self):
# The \\?\ prefix on Windows behaves differently with UNC
# (network share) paths.
- path = '\\\\server\\share\\file.mp3'
+ path = "\\\\server\\share\\file.mp3"
with _common.platform_windows():
outpath = util.syspath(path)
self.assertTrue(isinstance(outpath, str))
- self.assertEqual(outpath, '\\\\?\\UNC\\server\\share\\file.mp3')
+ self.assertEqual(outpath, "\\\\?\\UNC\\server\\share\\file.mp3")
def test_syspath_posix_unchanged(self):
with _common.platform_posix():
- path = os.path.join('a', 'b', 'c')
+ path = os.path.join("a", "b", "c")
outpath = util.syspath(path)
self.assertEqual(path, outpath)
def _windows_bytestring_path(self, path):
old_gfse = sys.getfilesystemencoding
- sys.getfilesystemencoding = lambda: 'mbcs'
+ sys.getfilesystemencoding = lambda: "mbcs"
try:
with _common.platform_windows():
return util.bytestring_path(path)
@@ -179,31 +190,31 @@ def _windows_bytestring_path(self, path):
sys.getfilesystemencoding = old_gfse
def test_bytestring_path_windows_encodes_utf8(self):
- path = 'caf\xe9'
+ path = "caf\xe9"
outpath = self._windows_bytestring_path(path)
- self.assertEqual(path, outpath.decode('utf-8'))
+ self.assertEqual(path, outpath.decode("utf-8"))
def test_bytesting_path_windows_removes_magic_prefix(self):
- path = '\\\\?\\C:\\caf\xe9'
+ path = "\\\\?\\C:\\caf\xe9"
outpath = self._windows_bytestring_path(path)
- self.assertEqual(outpath, 'C:\\caf\xe9'.encode())
+ self.assertEqual(outpath, "C:\\caf\xe9".encode())
class PathTruncationTest(_common.TestCase):
def test_truncate_bytestring(self):
with _common.platform_posix():
- p = util.truncate_path(b'abcde/fgh', 4)
- self.assertEqual(p, b'abcd/fgh')
+ p = util.truncate_path(b"abcde/fgh", 4)
+ self.assertEqual(p, b"abcd/fgh")
def test_truncate_unicode(self):
with _common.platform_posix():
- p = util.truncate_path('abcde/fgh', 4)
- self.assertEqual(p, 'abcd/fgh')
+ p = util.truncate_path("abcde/fgh", 4)
+ self.assertEqual(p, "abcd/fgh")
def test_truncate_preserves_extension(self):
with _common.platform_posix():
- p = util.truncate_path('abcde/fgh.ext', 5)
- self.assertEqual(p, 'abcde/f.ext')
+ p = util.truncate_path("abcde/fgh.ext", 5)
+ self.assertEqual(p, "abcde/f.ext")
class ConfitDeprecationTest(_common.TestCase):
@@ -224,5 +235,6 @@ def test_confit_deprecattion_warning_origin(self):
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/test_vfs.py b/test/test_vfs.py
index 4cd04e2131..28458aecf8 100644
--- a/test/test_vfs.py
+++ b/test/test_vfs.py
@@ -16,32 +16,38 @@
import unittest
from test import _common
-from beets import library
-from beets import vfs
+
+from beets import library, vfs
class VFSTest(_common.TestCase):
def setUp(self):
super().setUp()
- self.lib = library.Library(':memory:', path_formats=[
- ('default', 'albums/$album/$title'),
- ('singleton:true', 'tracks/$artist/$title'),
- ])
+ self.lib = library.Library(
+ ":memory:",
+ path_formats=[
+ ("default", "albums/$album/$title"),
+ ("singleton:true", "tracks/$artist/$title"),
+ ],
+ )
self.lib.add(_common.item())
self.lib.add_album([_common.item()])
self.tree = vfs.libtree(self.lib)
def test_singleton_item(self):
- self.assertEqual(self.tree.dirs['tracks'].dirs['the artist'].
- files['the title'], 1)
+ self.assertEqual(
+ self.tree.dirs["tracks"].dirs["the artist"].files["the title"], 1
+ )
def test_album_item(self):
- self.assertEqual(self.tree.dirs['albums'].dirs['the album'].
- files['the title'], 2)
+ self.assertEqual(
+ self.tree.dirs["albums"].dirs["the album"].files["the title"], 2
+ )
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")
diff --git a/test/testall.py b/test/testall.py
index 74236f5a03..b96815eb89 100755
--- a/test/testall.py
+++ b/test/testall.py
@@ -20,7 +20,7 @@
import sys
import unittest
-pkgpath = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) or '..'
+pkgpath = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) or ".."
sys.path.insert(0, pkgpath)
@@ -28,13 +28,13 @@ def suite():
s = unittest.TestSuite()
# Get the suite() of every module in this directory beginning with
# "test_".
- for fname in os.listdir(os.path.join(pkgpath, 'test')):
- match = re.match(r'(test_\S+)\.py$', fname)
+ for fname in os.listdir(os.path.join(pkgpath, "test")):
+ match = re.match(r"(test_\S+)\.py$", fname)
if match:
modname = match.group(1)
s.addTest(__import__(modname).suite())
return s
-if __name__ == '__main__':
- unittest.main(defaultTest='suite')
+if __name__ == "__main__":
+ unittest.main(defaultTest="suite")