Mirror of https://github.com/nathom/streamrip.git (synced 2025-05-09 14:11:55 -04:00)

Commit 36fd27c83c ("Update"), parent 06335058f3
17 changed files with 738 additions and 212 deletions
streamrip/artwork.py (new file, 36 lines)

@@ -0,0 +1,36 @@
from PIL import Image


def downscale_image(input_image_path: str, max_dimension: int):
    """Downscale an image in place given a maximum allowed dimension.

    Args:
        input_image_path (str): Path to image
        max_dimension (int): Maximum dimension allowed

    Returns:

    """
    # Open the image
    image = Image.open(input_image_path)

    # Get the original width and height
    width, height = image.size

    if max_dimension <= max(width, height):
        return

    # Calculate the new dimensions while maintaining the aspect ratio
    if width > height:
        new_width = max_dimension
        new_height = int(height * (max_dimension / width))
    else:
        new_height = max_dimension
        new_width = int(width * (max_dimension / height))

    # Resize the image with the new dimensions
    resized_image = image.resize((new_width, new_height))

    # Save the resized image
    resized_image.save(input_image_path)
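One thing worth noting about this new function: the early-return guard fires when max_dimension <= max(width, height), i.e. exactly when the image is larger than the allowed size, so as committed it appears to skip every image that actually needs shrinking. A minimal sketch of the presumably intended behaviour using Pillow's thumbnail(); this is an illustration, not part of the commit:

    from PIL import Image

    def downscale_image(input_image_path: str, max_dimension: int) -> None:
        # Downscale an image in place so that max(width, height) <= max_dimension.
        image = Image.open(input_image_path)
        width, height = image.size

        # Return early only when the image is already small enough.
        if max(width, height) <= max_dimension:
            return

        # thumbnail() keeps the aspect ratio and never enlarges the image.
        image.thumbnail((max_dimension, max_dimension))
        image.save(input_image_path)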
@@ -1,5 +1,6 @@
"""The clients that interact with the streaming service APIs."""

import asyncio
import logging
from abc import ABC, abstractmethod
from typing import Optional, Union

@@ -19,22 +20,23 @@ DEFAULT_USER_AGENT = (
class Client(ABC):
    source: str
    max_quality: int
    session: aiohttp.ClientSession

    @abstractmethod
    async def login(self):
        raise NotImplemented
        raise NotImplementedError

    @abstractmethod
    async def get_metadata(self, item: dict[str, Union[str, int, float]], media_type):
        raise NotImplemented
        raise NotImplementedError

    @abstractmethod
    async def search(self, query: str, media_type: str, limit: int = 500):
        raise NotImplemented
        raise NotImplementedError

    @abstractmethod
    async def get_downloadable(self, item_id: str, quality: int) -> Downloadable:
        raise NotImplemented
        raise NotImplementedError

    @staticmethod
    def get_rate_limiter(
@@ -47,21 +49,14 @@ class Client(ABC):
        )

    @staticmethod
    def get_session(headers: Optional[dict] = None) -> aiohttp.ClientSession:
    async def get_session(headers: Optional[dict] = None) -> aiohttp.ClientSession:
        if headers is None:
            headers = {}
        return aiohttp.ClientSession(
            headers={"User-Agent": DEFAULT_USER_AGENT}, **headers
        )


class NonStreamable(Exception):
    pass


class MissingCredentials(Exception):
    pass


class AuthenticationError(Exception):
    pass
    def __del__(self):
        # make sure http session is closed by end of program
        if hasattr(self, "session"):
            asyncio.run(self.session.close())
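Taken together, the Client hunks make session creation asynchronous (get_session is now a coroutine, called from login()), move the NonStreamable/MissingCredentials/AuthenticationError classes out to an exceptions module, and add a __del__ that tries to close the HTTP session at shutdown. A rough usage sketch under those assumptions; the album id is the one used by the new test file at the end of this diff, everything else is illustrative:

    import asyncio

    from streamrip.config import Config
    from streamrip.qobuz_client import QobuzClient

    async def main():
        config = Config.defaults()
        client = QobuzClient(config)
        # login() awaits Client.get_session() internally, so no aiohttp
        # session exists before this call.
        await client.login()
        meta = await client.get_metadata("lzpf67e8f4h1a", "album")
        print(meta["title"])
        # Close explicitly rather than relying on __del__, which calls
        # asyncio.run() and can fail while an event loop is still running.
        await client.session.close()

    asyncio.run(main())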
@@ -130,14 +130,15 @@ class ArtworkConfig:
    # The size of the artwork to embed. Options: thumbnail, small, large, original.
    # "original" images can be up to 30MB, and may fail embedding.
    # Using "large" is recommended.
    size: str
    embed_size: str
    # Both of these options limit the size of the embedded artwork. If their values
    # are larger than the actual dimensions of the image, they will be ignored.
    # If either value is -1, the image is left untouched.
    max_width: int
    max_height: int
    embed_max_width: int
    # Save the cover image at the highest quality as a seperate jpg file
    keep_hires_cover: bool
    save_artwork: bool
    # If artwork is saved, downscale it to these dimensions, or ignore if -1
    saved_max_width: int


@dataclass(slots=True)

@@ -316,5 +317,6 @@ class Config:
        self.file.update_toml()
        toml_file.write(dumps(self.file.toml))

    def __del__(self):
        self.save_file()
    @classmethod
    def defaults(cls):
        return cls(DEFAULT_CONFIG_PATH)
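The Config hunk above appears to swap destructor-time persistence (__del__ calling save_file) for an explicit defaults() constructor, which matches the __del__-based tests being commented out further down. A minimal sketch of the resulting workflow, with attribute names taken from this diff:

    from streamrip.config import Config

    # Start from the bundled default TOML instead of a user-supplied path.
    config = Config.defaults()
    config.file.downloads.folder = "new_folder"
    config.file.set_modified()
    # With __del__ gone, changes persist only when save_file() is called.
    config.save_file()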
@@ -125,14 +125,16 @@ embed = true
# The size of the artwork to embed. Options: thumbnail, small, large, original.
# "original" images can be up to 30MB, and may fail embedding.
# Using "large" is recommended.
size = "large"
# Both of these options limit the size of the embedded artwork. If their values
# are larger than the actual dimensions of the image, they will be ignored.
# If either value is -1, the image is left untouched.
max_width = -1
max_height = -1
embed_size = "large"
# If this is set to a value > 0, max(width, height) of the embedded art will be set to this value in pixels
# Proportions of the image will remain the same
embed_max_width = -1
# Save the cover image at the highest quality as a seperate jpg file
keep_hires_cover = true
save_artwork = true
# If this is set to a value > 0, max(width, height) of the saved art will be set to this value in pixels
# Proportions of the image will remain the same
saved_max_width = -1


[metadata]
# Sets the value of the 'ALBUM' field in the metadata to the playlist's name.

@@ -150,7 +152,7 @@ exclude = []
# template
add_singles_to_folder = false
# Available keys: "albumartist", "title", "year", "bit_depth", "sampling_rate",
# "container", "id", and "albumcomposer"
# "id", and "albumcomposer"
folder_format = "{albumartist} - {title} ({year}) [{container}] [{bit_depth}B-{sampling_rate}kHz]"
# Available keys: "tracknumber", "artist", "albumartist", "composer", "title",
# and "albumcomposer"
@@ -4,9 +4,10 @@ import hashlib
import deezer
from Cryptodome.Cipher import AES

from .client import AuthenticationError, Client, MissingCredentials, NonStreamable
from .client import Client
from .config import Config
from .downloadable import DeezerDownloadable
from .exceptions import AuthenticationError, MissingCredentials, NonStreamable


class DeezerClient(Client):

@@ -120,7 +121,7 @@ class DeezerClient(Client):
        )

        dl_info["url"] = url
        return DeezerDownloadable(dl_info)
        return DeezerDownloadable(self.session, dl_info)

    def _get_encrypted_file_url(
        self, meta_id: str, track_hash: str, media_version: str
@@ -10,7 +10,7 @@ import subprocess
import tempfile
import time
from abc import ABC, abstractmethod
from typing import Callable, Optional
from typing import Any, Callable, Optional

import aiofiles
import aiohttp

@@ -18,7 +18,7 @@ import m3u8
from Cryptodome.Cipher import Blowfish

from . import converter
from .client import NonStreamable
from .exceptions import NonStreamable


def generate_temp_path(url: str):

@@ -34,7 +34,7 @@ class Downloadable(ABC):
    chunk_size = 1024
    _size: Optional[int] = None

    async def download(self, path: str, callback: Callable[[int], None]):
    async def download(self, path: str, callback: Callable[[int], Any]):
        tmp = generate_temp_path(self.url)
        await self._download(tmp, callback)
        shutil.move(tmp, path)

@@ -52,15 +52,17 @@ class Downloadable(ABC):
    async def _download(self, path: str, callback: Callable[[int], None]):
        raise NotImplemented

    def __repr__(self):
        return f"{self.__class__.__name__}({self.__dict__})"


class BasicDownloadable(Downloadable):
    """Just downloads a URL."""

    def __init__(self, session: aiohttp.ClientSession, url: str):
    def __init__(self, session: aiohttp.ClientSession, url: str, extension: str):
        self.session = session
        self.url = url
        # TODO: verify that this is correct
        self.extension = url.split(".")[-1]
        self.extension = extension

    async def _download(self, path: str, callback: Callable[[int], None]):
        async with self.session.get(

@@ -173,7 +175,7 @@ class TidalDownloadable(Downloadable):
            raise NonStreamable(f"Tidal download: dl_info = {info}")

        assert isinstance(url, str)
        self.downloadable = BasicDownloadable(session, url)
        self.downloadable = BasicDownloadable(session, url, "m4a")

    async def _download(self, path: str, callback):
        await self.downloadable._download(path, callback)

@@ -198,7 +200,7 @@ class SoundcloudDownloadable(Downloadable):
        await self._download_original(path, callback)

    async def _download_original(self, path: str, callback):
        downloader = BasicDownloadable(self.session, self.url)
        downloader = BasicDownloadable(self.session, self.url, "flac")
        await downloader.download(path, callback)
        engine = converter.FLAC(path)
        engine.convert(path)
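BasicDownloadable now requires the caller to state the file extension instead of guessing it from the URL, and the Tidal, Soundcloud and Qobuz call sites pass "m4a", "flac" or "mp3" accordingly. A small usage sketch (the cover URL is made up; download() is the temp-file-then-move coroutine shown above):

    import asyncio
    import aiohttp

    from streamrip.downloadable import BasicDownloadable

    async def fetch_cover(url: str, dest: str) -> None:
        async with aiohttp.ClientSession() as session:
            # The extension is explicit now; URL-based guessing breaks on
            # CDN links that carry query strings.
            dl = BasicDownloadable(session, url, "jpg")
            await dl.download(dest, lambda received: None)

    asyncio.run(fetch_cover("https://example.com/cover?format=jpeg", "cover.jpg"))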
@@ -4,23 +4,20 @@ from __future__ import annotations

import logging
import re
from collections import OrderedDict
from dataclasses import dataclass
from string import Formatter
from typing import Generator, Hashable, Iterable, Optional, Type, Union
from typing import Optional, Type, TypeVar

from .constants import (
    ALBUM_KEYS,
    COPYRIGHT,
    FLAC_KEY,
    MP3_KEY,
    MP4_KEY,
    PHON_COPYRIGHT,
    TIDAL_Q_MAP,
    TRACK_KEYS,
)
from .exceptions import InvalidContainerError, InvalidSourceError
from .utils import get_cover_urls, get_quality_id
# from .constants import (
#     ALBUM_KEYS,
#     COPYRIGHT,
#     FLAC_KEY,
#     MP3_KEY,
#     MP4_KEY,
#     PHON_COPYRIGHT,
#     TIDAL_Q_MAP,
#     TRACK_KEYS,
# )

logger = logging.getLogger("streamrip")

@@ -32,16 +29,85 @@ def get_album_track_ids(source: str, resp) -> list[str]:
    return [track["id"] for track in tracklist]


# (url to cover, downloaded path of cover)
@dataclass(slots=True)
class CoverUrls:
    thumbnail: Optional[str]
    small: Optional[str]
    large: Optional[str]
    original: Optional[str]
class Covers:
    CoverEntry = tuple[str | None, str | None]
    thumbnail: CoverEntry
    small: CoverEntry
    large: CoverEntry
    original: CoverEntry

    def largest(self) -> Optional[str]:
        # Return first non-None item
        return self.original or self.large or self.small or self.thumbnail
    def empty(self) -> bool:
        return all(
            url is None
            for url, _ in (self.original, self.large, self.small, self.thumbnail)
        )

    def largest(self) -> CoverEntry:
        # Return first item with url
        if self.original[0]:
            return self.original

        if self.large[0]:
            return self.large

        if self.small[0]:
            return self.small

        if self.thumbnail[0]:
            return self.thumbnail

        raise Exception("No covers found")

    @classmethod
    def from_qobuz(cls, resp):
        cover_urls = {k: (v, None) for k, v in resp["image"].items()}
        cover_urls["original"] = ("org".join(cover_urls["large"].rsplit("600", 1)), None)  # type: ignore
        return cls(**cover_urls)  # type: ignore

    def get_size(self, size: str) -> CoverEntry:
        """Get the cover size, or the largest cover smaller than `size`.

        Args:
            size (str):

        Returns:
            CoverEntry

        Raises:
            Exception: If a suitable cover doesn't exist
        """
        fallback = False
        if size == "original":
            if self.original[0] is not None:
                return self.original
            else:
                fallback = True

        if fallback or size == "large":
            if self.large[0] is not None:
                return self.large
            else:
                fallback = True

        if fallback or size == "small":
            if self.small[0] is not None:
                return self.small
            else:
                fallback = True

        # At this point, either size == 'thumbnail' or nothing else was found
        if self.thumbnail[0] is None:
            raise Exception(f"No covers found for {size = }. Covers: {self}")

        return self.thumbnail


COPYRIGHT = "\u2117"
PHON_COPYRIGHT = "\u00a9"


@dataclass(slots=True)
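The new Covers container keeps a (url, local_path) pair per size and walks original → large → small → thumbnail when a requested size is missing. A quick illustration, assuming Covers is the slotted dataclass declared above (URLs are made up):

    covers = Covers(
        thumbnail=("https://img.example/50.jpg", None),
        small=("https://img.example/230.jpg", None),
        large=(None, None),
        original=(None, None),
    )

    assert not covers.empty()
    # "original" and "large" are absent, so get_size falls back to "small".
    url, path = covers.get_size("original")
    assert url == "https://img.example/230.jpg"
    # largest() walks the same chain and returns the small entry here too.
    assert covers.largest() == ("https://img.example/230.jpg", None)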
@@ -57,7 +123,31 @@ class TrackMetadata:

    @classmethod
    def from_qobuz(cls, album: AlbumMetadata, resp) -> TrackMetadata:
        raise NotImplemented
        title = typed(resp["title"].strip(), str)

        version = resp.get("version")
        work = resp.get("work")
        if version is not None and version not in title:
            title = f"{title} ({version})"
        if work is not None and work not in title:
            title = f"{work}: {title}"

        composer = typed(resp.get("composer", {}).get("name"), str | None)
        tracknumber = typed(resp.get("track_number", 1), int)
        discnumber = typed(resp.get("media_number", 1), int)
        artist = typed(safe_get(resp, "performer", "name"), str)
        track_id = typed(resp["id"], str)

        info = TrackInfo(id=track_id, quality=album.info.quality)
        return cls(
            info=info,
            title=title,
            album=album,
            artist=artist,
            tracknumber=tracknumber,
            discnumber=discnumber,
            composer=composer,
        )

    @classmethod
    def from_deezer(cls, album: AlbumMetadata, resp) -> TrackMetadata:

@@ -83,8 +173,18 @@ class TrackMetadata:
            return cls.from_deezer(album, resp)
        raise Exception

    def format_track_path(self, formatter: str):
        pass
    def format_track_path(self, formatter: str) -> str:
        # Available keys: "tracknumber", "artist", "albumartist", "composer", "title",
        # and "albumcomposer"
        info = {
            "title": self.title,
            "tracknumber": self.tracknumber,
            "artist": self.artist,
            "albumartist": self.album.albumartist,
            "albumcomposer": self.album.albumcomposer or "None",
            "composer": self.composer or "None",
        }
        return formatter.format(**info)


@dataclass(slots=True)
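format_track_path is a thin wrapper around str.format over a fixed set of six keys; anything else in the template would raise KeyError. An illustrative call, with a hypothetical template built only from the keys the method exposes:

    template = "{tracknumber:02d} - {artist} - {title}"
    # track_meta would be a TrackMetadata built by from_qobuz() above.
    print(track_meta.format_track_path(template))
    # e.g. "05 - Some Artist - Some Title"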
@@ -107,13 +207,12 @@ class AlbumMetadata:
    albumartist: str
    year: str
    genre: list[str]
    covers: CoverUrls
    covers: Covers

    albumcomposer: Optional[str] = None
    comment: Optional[str] = None
    compilation: Optional[str] = None
    copyright: Optional[str] = None
    cover: Optional[str] = None
    date: Optional[str] = None
    description: Optional[str] = None
    disctotal: Optional[int] = None

@@ -123,6 +222,20 @@ class AlbumMetadata:
    purchase_date: Optional[str] = None
    tracktotal: Optional[int] = None

    def format_folder_path(self, formatter: str) -> str:
        # Available keys: "albumartist", "title", "year", "bit_depth", "sampling_rate",
        # "id", and "albumcomposer"
        info = {
            "albumartist": self.albumartist,
            "albumcomposer": self.albumcomposer or "None",
            "bit_depth": self.info.bit_depth,
            "id": self.info.id,
            "sampling_rate": self.info.sampling_rate,
            "title": self.album,
            "year": self.year,
        }
        return formatter.format(**info)

    @classmethod
    def from_qobuz(cls, resp) -> AlbumMetadata:
        album = resp.get("title", "Unknown Album")
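Note that the config.toml hunk earlier removes "container" from the documented folder keys and format_folder_path supplies no "container" value, yet the default folder_format shipped in the same file still contains {container}; with that template str.format would raise KeyError, so one of the two presumably still needs updating. A sketch of the call with a template restricted to the keys actually provided:

    folder_template = "{albumartist} - {title} ({year})"
    # album_meta would be an AlbumMetadata built by AlbumMetadata.from_qobuz(resp).
    folder_name = album_meta.format_folder_path(folder_template)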
@@ -131,77 +244,84 @@ class AlbumMetadata:
        genres = list(set(re.findall(r"([^\u2192\/]+)", "/".join(genre))))
        date = resp.get("release_date_original") or resp.get("release_date")
        year = date[:4]
        copyright = resp.get("copyright")

        _copyright = resp.get("copyright")
        _copyright = re.sub(r"(?i)\(P\)", PHON_COPYRIGHT, _copyright)
        _copyright = re.sub(r"(?i)\(C\)", COPYRIGHT, _copyright)

        if artists := resp.get("artists"):
            albumartist = ", ".join(a["name"] for a in artists)
        else:
            albumartist = safe_get(resp, "artist", "name")
            albumartist = typed(safe_get(resp, "artist", "name"), str)

        albumcomposer = safe_get(resp, "composer", "name")
        label = resp.get("label")
        description = resp.get("description")
        disctotal = (
        albumcomposer = typed(safe_get(resp, "composer", "name"), str | None)
        _label = resp.get("label")
        if isinstance(_label, dict):
            _label = _label["name"]
        label = typed(_label, str | None)
        description = typed(resp.get("description"), str | None)
        disctotal = typed(
            max(
                track.get("media_number", 1)
                for track in safe_get(resp, "tracks", "items", default=[{}])
                for track in safe_get(resp, "tracks", "items", default=[{}])  # type: ignore
            )
            or 1
            or 1,
            int,
        )
        explicit = resp.get("parental_warning", False)

        if isinstance(label, dict):
            label = self.label.get("name")
        explicit = typed(resp.get("parental_warning", False), bool)

        # Non-embedded information
        version = resp.get("version")
        cover_urls = CoverUrls.from_qobuz(resp)
        streamable = resp.get("streamable", False)
        bit_depth = resp.get("maximum_bit_depth")
        sampling_rate = resp.get("maximum_sampling_rate")
        quality = get_quality_id(self.bit_depth, self.sampling_rate)
        # version = resp.get("version")
        cover_urls = Covers.from_qobuz(resp)
        streamable = typed(resp.get("streamable", False), bool)
        assert streamable
        bit_depth = typed(resp.get("maximum_bit_depth"), int | None)
        sampling_rate = typed(resp.get("maximum_sampling_rate"), int | None)
        quality = get_quality_id(bit_depth, sampling_rate)
        booklets = resp.get("goodies")
        item_id = resp.get("id")

        if sampling_rate is not None:
            sampling_rate *= 1000

        info = AlbumInfo(item_id, quality, explicit, sampling_rate, bit_depth, booklets)
        info = AlbumInfo(
            item_id, quality, label, explicit, sampling_rate, bit_depth, booklets
        )
        return AlbumMetadata(
            info,
            album,
            albumartist,
            year,
            genre=genres,
            covers=cover_urls,
            albumcomposer,
            comment,
            compilation,
            copyright(),
            cover,
            date,
            description,
            disctotal,
            encoder,
            grouping,
            lyrics,
            purchase_date,
            tracktotal,
            albumcomposer=albumcomposer,
            comment=None,
            compilation=None,
            copyright=_copyright,
            date=date,
            description=description,
            disctotal=disctotal,
            encoder=None,
            grouping=None,
            lyrics=None,
            purchase_date=None,
            tracktotal=tracktotal,
        )

    @classmethod
    def from_deezer(cls, resp) -> AlbumMetadata:
        raise NotImplemented
        raise NotImplementedError

    @classmethod
    def from_soundcloud(cls, resp) -> AlbumMetadata:
        raise NotImplemented
        raise NotImplementedError

    @classmethod
    def from_tidal(cls, resp) -> AlbumMetadata:
        raise NotImplemented
        raise NotImplementedError

    @classmethod
    def from_resp(cls, source, resp) -> AlbumMetadata:
    def from_resp(cls, resp, source) -> AlbumMetadata:
        if source == "qobuz":
            return cls.from_qobuz(resp)
        if source == "tidal":

@@ -210,13 +330,14 @@ class AlbumMetadata:
            return cls.from_soundcloud(resp)
        if source == "deezer":
            return cls.from_deezer(resp)
        raise Exception
        raise Exception("Invalid source")


@dataclass(slots=True)
class AlbumInfo:
    id: str
    quality: int
    label: Optional[str] = None
    explicit: bool = False
    sampling_rate: Optional[int] = None
    bit_depth: Optional[int] = None

@@ -232,7 +353,7 @@ def keys_in_format_string(s: str):
    return [f[1] for f in _formatter.parse(s) if f[1] is not None]


def safe_get(d: dict, *keys, default=None):
def safe_get(d: dict, *keys, default=None) -> dict | str | int | list | None:
    """Nested __getitem__ calls with a default value.

    Use to avoid key not found errors.

@@ -243,3 +364,35 @@ def safe_get(d: dict, *keys, default=None):
    if _d == {}:
        return default
    return _d


T = TypeVar("T")


def typed(thing, expected_type: Type[T]) -> T:
    assert isinstance(thing, expected_type)
    return thing


def get_quality_id(bit_depth: Optional[int], sampling_rate: Optional[int]) -> int:
    """Get the universal quality id from bit depth and sampling rate.

    :param bit_depth:
    :type bit_depth: Optional[int]
    :param sampling_rate: In kHz
    :type sampling_rate: Optional[int]
    """
    # XXX: Should `0` quality be supported?
    if bit_depth is None or sampling_rate is None:  # is lossy
        return 1

    if bit_depth == 16:
        return 2

    if bit_depth == 24:
        if sampling_rate <= 96:
            return 3

        return 4

    raise Exception(f"Invalid {bit_depth = }")
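typed() is a tiny runtime guard (assert isinstance, then return the value), and get_quality_id collapses (bit_depth, sampling_rate) pairs into the four-level quality id used throughout this commit. A short sanity sketch of both helpers:

    title = typed("Rumours", str)        # returns the value unchanged
    maybe = typed(None, str | None)      # unions are accepted by isinstance

    assert get_quality_id(None, None) == 1   # lossy sources
    assert get_quality_id(16, 44) == 2       # CD quality
    assert get_quality_id(24, 96) == 3       # hi-res up to 96 kHz
    assert get_quality_id(24, 192) == 4      # hi-res above 96 kHz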
@@ -1,7 +1,9 @@
from typing import Optional

from click import style
from tqdm import tqdm
from tqdm.asyncio import tqdm

from .config import Config

THEMES = {
    "plain": None,

@@ -14,8 +16,8 @@ THEMES = {
}


def get_progress_bar(total, theme="dainty", desc: Optional[str] = None, unit="B"):
    theme = THEMES[theme]
def get_progress_bar(config: Config, total: int, desc: Optional[str], unit="B"):
    theme = THEMES[config.session.theme.progress_bar]
    return tqdm(
        total=total,
        unit=unit,
@@ -5,9 +5,10 @@ from getpass import getpass

from click import launch, secho, style

from .client import AuthenticationError, Client, MissingCredentials
from .client import Client
from .config import Config
from .deezer_client import DeezerClient
from .exceptions import AuthenticationError, MissingCredentials
from .qobuz_client import QobuzClient
from .tidal_client import TidalClient

@@ -5,10 +5,7 @@ import re
import time
from typing import AsyncGenerator, Optional

import aiohttp
from aiolimiter import AsyncLimiter

from .client import DEFAULT_USER_AGENT, Client
from .client import Client
from .config import Config
from .downloadable import BasicDownloadable, Downloadable
from .exceptions import (

@@ -51,13 +48,13 @@ class QobuzClient(Client):
    def __init__(self, config: Config):
        self.logged_in = False
        self.config = config
        self.session = self.get_session()
        self.rate_limiter = self.get_rate_limiter(
            config.session.downloads.requests_per_minute
        )
        self.secret: Optional[str] = None

    async def login(self):
        self.session = await self.get_session()
        c = self.config.session.qobuz
        if not c.email_or_userid or not c.password_or_token:
            raise MissingCredentials

@@ -65,11 +62,13 @@ class QobuzClient(Client):
        assert not self.logged_in, "Already logged in"

        if not c.app_id or not c.secrets:
            logger.info("App id/secrets not found, fetching")
            c.app_id, c.secrets = await self._get_app_id_and_secrets()
            # write to file
            self.config.file.qobuz.app_id = c.app_id
            self.config.file.qobuz.secrets = c.secrets
            self.config.file.set_modified()
            logger.debug(f"Found {c.app_id = } {c.secrets = }")

        self.session.headers.update({"X-App-Id": c.app_id})
        self.secret = await self._get_valid_secret(c.secrets)
@@ -87,22 +86,21 @@ class QobuzClient(Client):
            "app_id": c.app_id,
        }

        resp = await self._api_request("user/login", params)
        logger.debug("Request params %s", params)
        status, resp = await self._api_request("user/login", params)
        logger.debug("Login resp: %s", resp)

        if resp.status == 401:
        if status == 401:
            raise AuthenticationError(f"Invalid credentials from params {params}")
        elif resp.status == 400:
            logger.debug(resp)
        elif status == 400:
            raise InvalidAppIdError(f"Invalid app id from params {params}")

        logger.info("Logged in to Qobuz")

        resp_json = await resp.json()

        if not resp_json["user"]["credential"]["parameters"]:
        if not resp["user"]["credential"]["parameters"]:
            raise IneligibleError("Free accounts are not eligible to download tracks.")

        uat = resp_json["user_auth_token"]
        uat = resp["user_auth_token"]
        self.session.headers.update({"X-User-Auth-Token": uat})
        # label = resp_json["user"]["credential"]["parameters"]["short_label"]


@@ -131,20 +129,19 @@ class QobuzClient(Client):

        epoint = f"{media_type}/get"

        response = await self._api_request(epoint, params)
        resp_json = await response.json()
        status, resp = await self._api_request(epoint, params)

        if response.status != 200:
            raise Exception(f'Error fetching metadata. "{resp_json["message"]}"')
        if status != 200:
            raise Exception(f'Error fetching metadata. "{resp["message"]}"')

        return resp_json
        return resp

    async def search(
        self, query: str, media_type: str, limit: int = 500
    ) -> AsyncGenerator:
        params = {
            "query": query,
            "limit": limit,
            # "limit": limit,
        }
        # TODO: move featured, favorites, and playlists into _api_get later
        if media_type == "featured":

@@ -164,13 +161,15 @@ class QobuzClient(Client):
        else:
            epoint = f"{media_type}/search"

        return self._paginate(epoint, params)
        async for status, resp in self._paginate(epoint, params, limit=limit):
            assert status == 200
            yield resp

    async def get_downloadable(self, item_id: str, quality: int) -> Downloadable:
        assert self.secret is not None and self.logged_in and 1 <= quality <= 4

        resp = await self._request_file_url(item_id, quality, self.secret)
        resp_json = await resp.json()
        status, resp_json = await self._request_file_url(item_id, quality, self.secret)
        assert status == 200
        stream_url = resp_json.get("url")

        if stream_url is None:
@@ -183,33 +182,52 @@ class QobuzClient(Client):
            )
            raise NonStreamable

        return BasicDownloadable(self.session, stream_url)
        return BasicDownloadable(
            self.session, stream_url, "flac" if quality > 1 else "mp3"
        )

    async def _paginate(self, epoint: str, params: dict) -> AsyncGenerator[dict, None]:
        response = await self._api_request(epoint, params)
        page = await response.json()
        logger.debug("Keys returned from _gen_pages: %s", ", ".join(page.keys()))
    async def _paginate(
        self, epoint: str, params: dict, limit: Optional[int] = None
    ) -> AsyncGenerator[tuple[int, dict], None]:
        """Paginate search results.

        params:
            limit: If None, all the results are yielded. Otherwise a maximum
            of `limit` results are yielded.

        returns:
            Generator that yields (status code, response) tuples
        """
        params.update({"limit": limit or 500})
        status, page = await self._api_request(epoint, params)
        logger.debug("paginate: initial request made with status %d", status)
        # albums, tracks, etc.
        key = epoint.split("/")[0] + "s"
        total = page.get(key, {})
        total = total.get("total") or total.get("items")
        items = page.get(key, {})
        total = items.get("total", 0) or items.get("items", 0)
        if limit is not None and limit < total:
            total = limit

        logger.debug("paginate: %d total items requested", total)

        if not total:
            logger.debug("Nothing found from %s epoint", epoint)
            return

        limit = page.get(key, {}).get("limit", 500)
        offset = page.get(key, {}).get("offset", 0)
        limit = int(page.get(key, {}).get("limit", 500))
        offset = int(page.get(key, {}).get("offset", 0))

        logger.debug("paginate: from response: limit=%d, offset=%d", limit, offset)
        params.update({"limit": limit})
        yield page
        yield status, page
        while (offset + limit) < total:
            offset += limit
            params.update({"offset": offset})
            response = await self._api_request(epoint, params)
            yield await response.json()
            yield await self._api_request(epoint, params)

    async def _get_app_id_and_secrets(self) -> tuple[str, list[str]]:
        spoofer = QobuzSpoofer()
        return await spoofer.get_app_id_and_secrets()
        async with QobuzSpoofer() as spoofer:
            return await spoofer.get_app_id_and_secrets()

    async def _get_valid_secret(self, secrets: list[str]) -> str:
        results = await asyncio.gather(

@@ -223,15 +241,18 @@ class QobuzClient(Client):
        return working_secrets[0]

    async def _test_secret(self, secret: str) -> Optional[str]:
        resp = await self._request_file_url("19512574", 1, secret)
        if resp.status == 400:
        status, _ = await self._request_file_url("19512574", 4, secret)
        if status == 400:
            return None
        resp.raise_for_status()
        return secret
        if status == 200:
            return secret
        logger.warning("Got status %d when testing secret", status)
        return None

    async def _request_file_url(
        self, track_id: str, quality: int, secret: str
    ) -> aiohttp.ClientResponse:
    ) -> tuple[int, dict]:
        quality = self.get_quality(quality)
        unix_ts = time.time()
        r_sig = f"trackgetFileUrlformat_id{quality}intentstreamtrack_id{track_id}{unix_ts}{secret}"
        logger.debug("Raw request signature: %s", r_sig)

@@ -246,11 +267,24 @@ class QobuzClient(Client):
        }
        return await self._api_request("track/getFileUrl", params)

    async def _api_request(self, epoint: str, params: dict) -> aiohttp.ClientResponse:
    async def _api_request(self, epoint: str, params: dict) -> tuple[int, dict]:
        """Make a request to the API.
        returns: status code, json parsed response
        """
        url = f"{QOBUZ_BASE_URL}/{epoint}"
        logger.debug("api_request: endpoint=%s, params=%s", epoint, params)
        if self.rate_limiter is not None:
            async with self.rate_limiter:
                async with self.session.get(url, params=params) as response:
                    return response
                async with self.session.get(
                    url, params=params, encoding="utf-8"
                ) as response:
                    return response.status, await response.json()
        # return await self.session.get(url, params=params)
        async with self.session.get(url, params=params) as response:
            return response
            resp_json = await response.json()
            return response.status, resp_json

    @staticmethod
    def get_quality(quality: int):
        quality_map = (5, 6, 7, 27)
        return quality_map[quality - 1]
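With _api_request and _request_file_url now returning (status, parsed_json) tuples, callers no longer pass aiohttp.ClientResponse objects around, and search() consumes _paginate() as an async generator of pages. A sketch of the consuming side, assuming a logged-in QobuzClient as constructed above (the wrapper function is illustrative):

    async def collect_albums(client, query: str, limit: int = 10) -> list[dict]:
        found: list[dict] = []
        # Each yielded page is already-parsed JSON; the test file at the end
        # of this diff walks the same "albums" -> "items" structure.
        async for page in client.search(query, "album", limit=limit):
            found.extend(page["albums"]["items"])
        return found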
@@ -28,9 +28,10 @@ class QobuzSpoofer:
        self.app_id_regex = (
            r'production:{api:{appId:"(?P<app_id>\d{9})",appSecret:"(\w{32})'
        )
        self.session = aiohttp.ClientSession()
        self.session = None

    async def get_app_id_and_secrets(self) -> tuple[str, list[str]]:
        assert self.session is not None
        async with self.session.get("https://play.qobuz.com/login") as req:
            login_page = await req.text()


@@ -88,3 +89,12 @@ class QobuzSpoofer:
        secrets_list = vals

        return app_id, secrets_list

    async def __aenter__(self):
        self.session = aiohttp.ClientSession()
        return self

    async def __aexit__(self, *_):
        if self.session is not None:
            await self.session.close()
        self.session = None
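The spoofer now only owns an aiohttp session inside an async context, mirroring the async with QobuzSpoofer() call added to _get_app_id_and_secrets above. Standalone usage would look roughly like this (the module path is assumed from the repo layout, not shown in this diff):

    import asyncio

    from streamrip.qobuz_spoofer import QobuzSpoofer

    async def fetch_credentials() -> tuple[str, list[str]]:
        # __aenter__ opens the session; __aexit__ closes it even on errors.
        async with QobuzSpoofer() as spoofer:
            return await spoofer.get_app_id_and_secrets()

    app_id, secrets = asyncio.run(fetch_credentials())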
@@ -15,12 +15,12 @@ class SoundcloudClient(Client):
    def __init__(self, config: Config):
        self.global_config = config
        self.config = config.session.soundcloud
        self.session = self.get_session()
        self.rate_limiter = self.get_rate_limiter(
            config.session.downloads.requests_per_minute
        )

    async def login(self):
        self.session = await self.get_session()
        client_id, app_version = self.config.client_id, self.config.app_version
        if not client_id or not app_version or not self._announce():
            client_id, app_version = await self._refresh_tokens()
@@ -1,12 +1,94 @@
import os
from dataclasses import dataclass
from enum import Enum
from typing import Generator

import aiofiles
import mutagen.id3 as id3
from mutagen.flac import FLAC, Picture
from mutagen.id3 import APIC, ID3, ID3NoHeaderError
from mutagen.mp4 import MP4, MP4Cover

from .metadata import TrackMetadata
from .metadata import Covers, TrackMetadata

FLAC_MAX_BLOCKSIZE = 16777215  # 16.7 MB

MP4_KEYS = (
    "\xa9nam",
    "\xa9ART",
    "\xa9alb",
    r"aART",
    "\xa9day",
    "\xa9day",
    "\xa9cmt",
    "desc",
    "purd",
    "\xa9grp",
    "\xa9gen",
    "\xa9lyr",
    "\xa9too",
    "cprt",
    "cpil",
    "covr",
    "trkn",
    "disk",
    None,
    None,
    None,
)

MP3_KEYS = (
    id3.TIT2,
    id3.TPE1,
    id3.TALB,
    id3.TPE2,
    id3.TCOM,
    id3.TYER,
    id3.COMM,
    id3.TT1,
    id3.TT1,
    id3.GP1,
    id3.TCON,
    id3.USLT,
    id3.TEN,
    id3.TCOP,
    id3.TCMP,
    None,
    id3.TRCK,
    id3.TPOS,
    None,
    None,
    None,
)

METADATA_TYPES = (
    "title",
    "artist",
    "album",
    "albumartist",
    "composer",
    "year",
    "comment",
    "description",
    "purchase_date",
    "grouping",
    "genre",
    "lyrics",
    "encoder",
    "copyright",
    "compilation",
    "cover",
    "tracknumber",
    "discnumber",
    "tracktotal",
    "disctotal",
    "date",
)


FLAC_KEY = {v: v.upper() for v in METADATA_TYPES}
MP4_KEY = dict(zip(METADATA_TYPES, MP4_KEYS))
MP3_KEY = dict(zip(METADATA_TYPES, MP3_KEYS))


class Container(Enum):
@@ -37,10 +119,9 @@ class Container(Enum):
        # unreachable
        yield

    def _tag_flac(self, meta):
        for k, v in FLAC_KEY.items():
            tag = getattr(meta, k)
            tag = self._attr_from_meta(meta, k)
            if tag:
                if k in {
                    "tracknumber",

@@ -52,7 +133,6 @@ class Container(Enum):

                yield (v, str(tag))

    def _tag_mp3(self, meta):
        for k, v in MP3_KEY.items():
            if k == "tracknumber":

@@ -60,7 +140,7 @@ class Container(Enum):
            elif k == "discnumber":
                text = f"{meta.discnumber}/{meta.disctotal}"
            else:
                text = getattr(self, k)
                text = self._attr_from_meta(meta, k)

            if text is not None and v is not None:
                yield (v.__name__, v(encoding=3, text=text))
@@ -72,30 +152,76 @@ class Container(Enum):
            elif k == "discnumber":
                text = [(meta.discnumber, meta.disctotal)]
            else:
                text = getattr(self, k)
                text = self._attr_from_meta(meta, k)

            if v is not None and text is not None:
                yield (v, text)


@dataclass(slots=True)
class Tagger:
    meta: TrackMetadata

    def tag(self, path: str, embed_cover: bool, covers: Cover):
        ext = path.split(".")[-1].upper()
        if ext == "flac":
            container = Container.FLAC
        elif ext == "m4a":
            container = Container.AAC
        elif ext == "mp3":
            container = Container.MP3
    def _attr_from_meta(self, meta: TrackMetadata, attr: str) -> str:
        # TODO: verify this works
        in_trackmetadata = {
            "title",
            "album",
            "artist",
            "tracknumber",
            "discnumber",
            "composer",
        }
        if attr in in_trackmetadata:
            return str(getattr(meta, attr))
        else:
            raise Exception(f"Invalid extension {ext}")
            return str(getattr(meta.album, attr))

        audio = container.get_mutagen_class(path)
        tags = container.get_tag_pairs(self.meta)
    def tag_audio(self, audio, tags):
        for k, v in tags:
            audio[k] = v

        c =
    async def embed_cover(self, audio, cover_path):
        if self == Container.FLAC:
            size = os.path.getsize(cover_path)
            if size > FLAC_MAX_BLOCKSIZE:
                raise Exception("Cover art too big for FLAC")
            cover = Picture()
            cover.type = 3
            cover.mime = "image/jpeg"
            async with aiofiles.open(cover_path, "rb") as img:
                cover.data = await img.read()
            audio.add_picture(cover)
        elif self == Container.MP3:
            cover = APIC()
            cover.type = 3
            cover.mime = "image/jpeg"
            async with aiofiles.open(cover_path, "rb") as img:
                cover.data = await img.read()
            audio.add(cover)
        elif self == Container.AAC:
            async with aiofiles.open(cover_path, "rb") as img:
                cover = MP4Cover(await img.read(), imageformat=MP4Cover.FORMAT_JPEG)
            audio["covr"] = [cover]

    def save_audio(self, audio, path):
        if self == Container.FLAC:
            audio.save()
        elif self == Container.AAC:
            audio.save()
        elif self == Container.MP3:
            audio.save(path, "v2_version=3")


async def tag_file(path: str, meta: TrackMetadata, cover_path: str | None):
    ext = path.split(".")[-1].upper()
    if ext == "flac":
        container = Container.FLAC
    elif ext == "m4a":
        container = Container.AAC
    elif ext == "mp3":
        container = Container.MP3
    else:
        raise Exception(f"Invalid extension {ext}")

    audio = container.get_mutagen_class(path)
    tags = container.get_tag_pairs(meta)
    container.tag_audio(audio, tags)
    if cover_path is not None:
        await container.embed_cover(audio, cover_path)
    container.save_audio(audio, path)
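Tagging is now a module-level coroutine, tag_file, rather than a method on the old Tagger dataclass, with the Container enum supplying the mutagen class, tag pairs, cover embedding, and save behaviour. One caveat: the extension is upper-cased before being compared against lowercase literals ("FLAC" vs "flac"), so as written every call would appear to fall through to the "Invalid extension" branch; the sketch below assumes that comparison ends up case-insensitive. The Track._tag hunk further down invokes it like this:

    # track_meta: a TrackMetadata built by this commit's from_qobuz();
    # cover: path to the downloaded embed cover, or None to skip embedding.
    await tag_file("Artist - Title.flac", track_meta, cover)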
@@ -23,12 +23,12 @@ class TidalClient(Client):
        self.logged_in = False
        self.global_config = config
        self.config = config.session.tidal
        self.session = self.get_session()
        self.rate_limiter = self.get_rate_limiter(
            config.session.downloads.requests_per_minute
        )

    async def login(self):
        self.session = await self.get_session()
        c = self.config
        if not c.access_token:
            raise Exception("Access token not found in config.")
@@ -1,13 +1,16 @@
import asyncio
import os
from dataclasses import dataclass

from . import converter
from .artwork import downscale_image
from .client import Client
from .config import Config
from .downloadable import Downloadable
from .downloadable import BasicDownloadable, Downloadable
from .media import Media, Pending
from .metadata import AlbumMetadata, TrackMetadata
from .metadata import AlbumMetadata, Covers, TrackMetadata
from .progress import get_progress_bar
from .tagger import tag_file


@dataclass(slots=True)

@@ -16,28 +19,33 @@ class Track(Media):
    downloadable: Downloadable
    config: Config
    folder: str
    # Is None if a cover doesn't exist for the track
    cover_path: str | None
    # change?
    download_path: str = ""

    async def preprocess(self):
        folder = self._get_folder(self.folder)
        os.makedirs(folder, exist_ok=True)
        # Run in background while track downloads?
        # Don't download again if part of album
        await self._download_cover()
        self._set_download_path()
        os.makedirs(self.folder, exist_ok=True)

    async def download(self):
        async with get_progress_bar(self.config, self.downloadable.size()) as bar:
        # TODO: progress bar description
        with get_progress_bar(
            self.config,
            await self.downloadable.size(),
            f"Track {self.meta.tracknumber}",
        ) as bar:
            await self.downloadable.download(
                self.download_path, lambda x: bar.update(x)
            )

    async def postprocess(self):
        await self._tag()
        await self._convert()
        if self.config.session.conversion.enabled:
            await self._convert()

    async def _tag(self):
        t = Tagger(self.meta)
        t.tag(self.download_path)
        await tag_file(self.download_path, self.meta, self.cover_path)

    async def _convert(self):
        CONV_CLASS = {

@@ -60,10 +68,10 @@ class Track(Media):
        engine.convert()
        self.download_path = engine.final_fn  # because the extension changed

    def _get_folder(self, parent: str) -> str:
    def _set_download_path(self):
        formatter = self.config.session.filepaths.track_format
        track_path = self.meta.format_track_path(formatter)
        return os.path.join(self.folder, track_path)
        self.download_path = os.path.join(self.folder, track_path)


@dataclass(slots=True)

@@ -73,6 +81,7 @@ class PendingTrack(Pending):
    client: Client
    config: Config
    folder: str
    cover_path: str

    async def resolve(self) -> Track:
        resp = await self.client.get_metadata({"id": self.id}, "track")
@@ -80,4 +89,87 @@ class PendingTrack(Pending):
        quality = getattr(self.config.session, self.client.source).quality
        assert isinstance(quality, int)
        downloadable = await self.client.get_downloadable(self.id, quality)
        return Track(meta, downloadable, self.config, self.folder)
        return Track(meta, downloadable, self.config, self.folder, self.cover_path)


@dataclass(slots=True)
class PendingSingle(Pending):
    """Whereas PendingTrack is used in the context of an album, where the album metadata
    and cover have been resolved, PendingSingle is used when a single track is downloaded.

    This resolves the Album metadata and downloads the cover to pass to the Track class.
    """

    id: str
    client: Client
    config: Config

    async def resolve(self) -> Track:
        resp = await self.client.get_metadata({"id": self.id}, "track")
        album = AlbumMetadata.from_resp(resp["album"], self.client.source)
        meta = TrackMetadata.from_resp(album, self.client.source, resp)

        quality = getattr(self.config.session, self.client.source).quality
        assert isinstance(quality, int)
        folder = self._format_folder(album)
        os.makedirs(folder, exist_ok=True)

        embedded_cover_path, downloadable = await asyncio.gather(
            self._download_cover(album.covers, folder),
            self.client.get_downloadable(self.id, quality),
        )
        return Track(meta, downloadable, self.config, folder, embedded_cover_path)

    def _format_folder(self, meta: AlbumMetadata) -> str:
        c = self.config.session
        parent = c.downloads.folder
        formatter = c.filepaths.folder_format
        return os.path.join(parent, meta.format_folder_path(formatter))

    async def _download_cover(self, covers: Covers, folder: str) -> str | None:
        """Download artwork, which may include a seperate file to keep.

        Args:
            covers (Covers): The set of available covers.

        """
        c = self.config.session.artwork
        if not c.save_artwork and not c.embed:
            # No need to download anything
            return None

        session = self.client.session
        downloadables = []

        hires_cover_path = None
        if c.save_artwork:
            l_url, _ = covers.largest()
            assert l_url is not None
            hires_cover_path = os.path.join(folder, "cover.jpg")
            downloadables.append(
                BasicDownloadable(session, l_url, "jpg").download(
                    hires_cover_path, lambda _: None
                )
            )

        embed_cover_path = None
        if c.embed:
            embed_url, _ = covers.get_size(c.embed_size)
            assert embed_url is not None
            embed_cover_path = os.path.join(folder, "embed_cover.jpg")
            downloadables.append(
                BasicDownloadable(session, embed_url, "jpg").download(
                    embed_cover_path, lambda _: None
                )
            )
        await asyncio.gather(*downloadables)

        if c.embed and c.embed_max_width > 0:
            assert embed_cover_path is not None
            downscale_image(embed_cover_path, c.embed_max_width)

        if c.save_artwork and c.saved_max_width > 0:
            assert hires_cover_path is not None
            downscale_image(hires_cover_path, c.saved_max_width)

        return embed_cover_path
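PendingSingle stitches the commit together: it resolves album metadata, formats the destination folder, downloads the save/embed covers through BasicDownloadable, and hands everything to Track. A sketch of the end-to-end flow it implies (client and config built as in the earlier hunks; the wrapper function is illustrative):

    async def rip_single(client, config, track_id: str) -> None:
        pending = PendingSingle(track_id, client, config)
        track = await pending.resolve()   # metadata, folder, cover, downloadable
        await track.preprocess()          # sets download_path, creates the folder
        await track.download()            # progress bar + streamed download
        await track.postprocess()         # tagging, then optional conversion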
@@ -130,13 +130,13 @@ def test_sample_config_data_fields(sample_config_data):
    assert sample_config_data.conversion == test_config.conversion


def test_config_save_file_called_on_del(sample_config, mocker):
    sample_config.file.set_modified()
    mockf = mocker.Mock()

    sample_config.save_file = mockf
    sample_config.__del__()
    mockf.assert_called_once()
# def test_config_save_file_called_on_del(sample_config, mocker):
#     sample_config.file.set_modified()
#     mockf = mocker.Mock()
#
#     sample_config.save_file = mockf
#     sample_config.__del__()
#     mockf.assert_called_once()


def test_config_update_on_save():

@@ -152,17 +152,17 @@ def test_config_update_on_save():
    assert conf2.session.downloads.folder == "new_folder"


def test_config_update_on_del():
    tmp_config_path = "tests/config2.toml"
    shutil.copy(SAMPLE_CONFIG, tmp_config_path)
    conf = Config(tmp_config_path)
    conf.file.downloads.folder = "new_folder"
    conf.file.set_modified()
    del conf
    conf2 = Config(tmp_config_path)
    os.remove(tmp_config_path)

    assert conf2.session.downloads.folder == "new_folder"
# def test_config_update_on_del():
#     tmp_config_path = "tests/config2.toml"
#     shutil.copy(SAMPLE_CONFIG, tmp_config_path)
#     conf = Config(tmp_config_path)
#     conf.file.downloads.folder = "new_folder"
#     conf.file.set_modified()
#     del conf
#     conf2 = Config(tmp_config_path)
#     os.remove(tmp_config_path)
#
#     assert conf2.session.downloads.folder == "new_folder"


def test_config_dont_update_without_set_modified():
tests/test_qobuz_client.py (new file, 70 lines)

@@ -0,0 +1,70 @@
import asyncio
import hashlib
import logging
import os

import pytest
from util import afor, arun

from streamrip.config import Config
from streamrip.downloadable import BasicDownloadable
from streamrip.exceptions import MissingCredentials
from streamrip.qobuz_client import QobuzClient

logger = logging.getLogger("streamrip")


@pytest.fixture
def config():
    c = Config.defaults()
    c.session.qobuz.email_or_userid = os.environ["QOBUZ_EMAIL"]
    c.session.qobuz.password_or_token = hashlib.md5(
        os.environ["QOBUZ_PASSWORD"].encode("utf-8")
    ).hexdigest()
    return c


@pytest.fixture
def client(config):
    c = QobuzClient(config)  # type: ignore
    arun(c.login())
    return c


def test_client_raises_missing_credentials():
    c = Config.defaults()
    with pytest.raises(MissingCredentials):
        arun(QobuzClient(c).login())


def test_client_get_metadata(client):
    meta = arun(client.get_metadata("lzpf67e8f4h1a", "album"))
    assert meta["title"] == "I Killed Your Dog"
    assert len(meta["tracks"]["items"]) == 16
    assert meta["maximum_bit_depth"] == 24


def test_client_get_downloadable(client):
    d = arun(client.get_downloadable("19512574", 3))
    assert isinstance(d, BasicDownloadable)
    assert d.extension == "flac"
    assert isinstance(d.url, str)
    assert "https://" in d.url


def test_client_search_limit(client):
    res = client.search("rumours", "album", limit=5)
    total = 0
    for r in afor(res):
        total += len(r["albums"]["items"])
    assert total == 5


def test_client_search_no_limit(client):
    res = client.search("rumours", "album", limit=None)
    correct_total = 0
    total = 0
    for r in afor(res):
        total += len(r["albums"]["items"])
        correct_total = max(correct_total, r["albums"]["total"])
    assert total == correct_total
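The tests import arun and afor from a local util module that is not included in this diff. Plausible minimal implementations, shown only so the fixtures above are self-explanatory; these helpers are assumptions, not part of the commit:

    import asyncio
    from typing import AsyncGenerator

    def arun(coro):
        # Run a coroutine to completion on a fresh event loop.
        return asyncio.run(coro)

    def afor(agen: AsyncGenerator) -> list:
        # Drain an async generator into a list so a plain `for` can walk it.
        async def collect() -> list:
            return [item async for item in agen]
        return asyncio.run(collect())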