Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
46 changes: 45 additions & 1 deletion beetsplug/duplicates.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,9 +17,10 @@
import os
import shlex

from beets.dbcore.query import MatchQuery
from beets.library import Album, Item
from beets.plugins import BeetsPlugin
from beets.ui import Subcommand, UserError, print_
from beets.ui import Subcommand, UserError, colorize, print_
from beets.util import (
MoveOperation,
bytestring_path,
Expand Down Expand Up @@ -141,6 +142,49 @@ def __init__(self):
)
self._command.parser.add_all_common_options()

self.register_listener("import_task_created", self.import_task_created)

def import_task_created(self, task, session):
    """Event hook fired when an import task is created.

    When the ``dedupe_mb_trackid_on_import`` config option is present and
    truthy, delegate to the mb_trackid-based deduplication; otherwise do
    nothing (returning ``None`` leaves the task unchanged).
    """
    option = "dedupe_mb_trackid_on_import"
    if option not in self.config:
        return None
    if not self.config[option].get(bool):
        return None
    return self._dedupe_task_on_mb_trackid(task, session)

def _dedupe_task_on_mb_trackid(self, task, session):
    """Drop tracks from ``task`` that are already in the library.

    A track counts as a duplicate when an already-imported library item
    has the same ``mb_trackid``. Items without an ``mb_trackid`` are
    never treated as duplicates.

    Returns ``[]`` (discarding the task) when every item was a
    duplicate, otherwise ``[task]`` with the duplicates removed.
    """
    # Find all items that already have the same track imported.
    dupes = []
    for item in task.items:
        if not item.mb_trackid:
            continue

        # Query the library for any matching tracks.
        resp = session.lib.items(
            query=MatchQuery("mb_trackid", item.mb_trackid)
        )
        if len(resp.rows) > 0:
            log_prefix = f"{item.artist} - {item.album} - {item.title}"
            # BUG FIX: adjacent string literals previously rendered as
            # "...:Item already imported" with no space after the colon.
            print_(
                f"{colorize('text_warning', log_prefix)}: "
                "Item already imported, skipping..."
            )
            dupes.append(item)

    # Remove the dupes. ``album`` keeps the label of the last duplicate
    # removed, for use in the summary message below.
    album = ""
    for dup in dupes:
        album = f"{dup.artist} - {dup.album}"
        task.items.remove(dup)

    # Get rid of the task if all items were removed.
    if len(task.items) == 0:
        # BUG FIX: build the prefix into one string instead of passing
        # "" as a separate print_() argument, which produced a leading
        # space when no album label was available.
        prefix = f"{colorize('text_warning', album)}: " if album else ""
        print_(prefix + "All items removed due to duplicates, removing task")
        return []

    return [task]

def commands(self):
def _dup(lib, opts, args):
self.config.set_args(opts)
Expand Down
1 change: 0 additions & 1 deletion beetsplug/tidal/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -355,7 +355,6 @@ def _get_album_info(
track_by_id: dict[str, TidalTrack],
artist_by_id: dict[str, TidalArtist],
) -> AlbumInfo:

track_infos: list[TrackInfo] = []
for i, track_rel in enumerate(
album["relationships"]["items"]["data"], start=1
Expand Down
2 changes: 2 additions & 0 deletions docs/changelog.rst
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,8 @@ New features
See :doc:`plugins/tidal` for more information.

- Add support for adding or modifying a subtitle (ID3 tag ``TIT3``) field
- :doc:`plugins/duplicates`: ``dedupe_mb_trackid_on_import`` option added to
deduplicate during imports based on already-imported ``mb_trackid``.

Bug fixes
~~~~~~~~~
Expand Down
7 changes: 7 additions & 0 deletions docs/plugins/duplicates.rst
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,11 @@ duplicates or just the duplicates themselves via command-line switches
-t TAG, --tag=TAG tag matched items with 'k=v' attribute
-r, --remove remove items from library

Generally, beets operates on Albums or Track singletons. As a way to bridge the
gap, the ``duplicates`` plugin can be configured to deduplicate tracks on import
by checking for already-imported items with the same ``mb_trackid``. See the
``dedupe_mb_trackid_on_import`` option for details.

Configuration
-------------

Expand All @@ -54,6 +59,8 @@ file. The available options mirror the command-line options:
- **count**: Print a count of duplicate tracks or albums in the format
``$albumartist - $album - $title: $count`` (for tracks) or ``$albumartist -
$album: $count`` (for albums). Default: ``no``.
- **dedupe_mb_trackid_on_import**: Deduplicate album tracks during import when
  an already-imported track has the same ``mb_trackid``. Default: ``no``.
- **delete**: Remove matched items from the library and from the disk. Default:
``no``
- **format**: A specific format with which to print every track or album. This
Expand Down
Loading