mirror of
https://github.com/nathom/streamrip.git
synced 2025-05-12 22:26:16 -04:00
Update
This commit is contained in:
parent
06335058f3
commit
36fd27c83c
17 changed files with 738 additions and 212 deletions
36
streamrip/artwork.py
Normal file
36
streamrip/artwork.py
Normal file
|
@ -0,0 +1,36 @@
|
||||||
|
from PIL import Image
|
||||||
|
|
||||||
|
|
||||||
|
def downscale_image(input_image_path: str, max_dimension: int):
    """Downscale an image in place given a maximum allowed dimension.

    The image is resized so that its longest side equals ``max_dimension``,
    preserving the aspect ratio, and is saved back over the original file.
    If the image already fits within ``max_dimension``, it is left untouched.

    Args:
        input_image_path (str): Path to image
        max_dimension (int): Maximum dimension allowed

    Returns:
        None
    """
    # Open the image
    image = Image.open(input_image_path)

    # Get the original width and height
    width, height = image.size

    # Nothing to do if the image already fits within the limit.
    # NOTE: the original guard was inverted (`max_dimension <= max(width, height)`),
    # which skipped downscaling exactly when it was needed and instead
    # upscaled/re-encoded images that were already small enough.
    if max(width, height) <= max_dimension:
        return

    # Calculate the new dimensions while maintaining the aspect ratio
    if width > height:
        new_width = max_dimension
        new_height = int(height * (max_dimension / width))
    else:
        new_height = max_dimension
        new_width = int(width * (max_dimension / height))

    # Resize the image with the new dimensions
    resized_image = image.resize((new_width, new_height))

    # Save the resized image in place, overwriting the original
    resized_image.save(input_image_path)
|
|
@ -1,5 +1,6 @@
|
||||||
"""The clients that interact with the streaming service APIs."""
|
"""The clients that interact with the streaming service APIs."""
|
||||||
|
|
||||||
|
import asyncio
|
||||||
import logging
|
import logging
|
||||||
from abc import ABC, abstractmethod
|
from abc import ABC, abstractmethod
|
||||||
from typing import Optional, Union
|
from typing import Optional, Union
|
||||||
|
@ -19,22 +20,23 @@ DEFAULT_USER_AGENT = (
|
||||||
class Client(ABC):
|
class Client(ABC):
|
||||||
source: str
|
source: str
|
||||||
max_quality: int
|
max_quality: int
|
||||||
|
session: aiohttp.ClientSession
|
||||||
|
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
async def login(self):
|
async def login(self):
|
||||||
raise NotImplemented
|
raise NotImplementedError
|
||||||
|
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
async def get_metadata(self, item: dict[str, Union[str, int, float]], media_type):
|
async def get_metadata(self, item: dict[str, Union[str, int, float]], media_type):
|
||||||
raise NotImplemented
|
raise NotImplementedError
|
||||||
|
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
async def search(self, query: str, media_type: str, limit: int = 500):
|
async def search(self, query: str, media_type: str, limit: int = 500):
|
||||||
raise NotImplemented
|
raise NotImplementedError
|
||||||
|
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
async def get_downloadable(self, item_id: str, quality: int) -> Downloadable:
|
async def get_downloadable(self, item_id: str, quality: int) -> Downloadable:
|
||||||
raise NotImplemented
|
raise NotImplementedError
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_rate_limiter(
|
def get_rate_limiter(
|
||||||
|
@ -47,21 +49,14 @@ class Client(ABC):
|
||||||
)
|
)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_session(headers: Optional[dict] = None) -> aiohttp.ClientSession:
|
async def get_session(headers: Optional[dict] = None) -> aiohttp.ClientSession:
|
||||||
if headers is None:
|
if headers is None:
|
||||||
headers = {}
|
headers = {}
|
||||||
return aiohttp.ClientSession(
|
return aiohttp.ClientSession(
|
||||||
headers={"User-Agent": DEFAULT_USER_AGENT}, **headers
|
headers={"User-Agent": DEFAULT_USER_AGENT}, **headers
|
||||||
)
|
)
|
||||||
|
|
||||||
|
def __del__(self):
|
||||||
class NonStreamable(Exception):
|
# make sure http session is closed by end of program
|
||||||
pass
|
if hasattr(self, "session"):
|
||||||
|
asyncio.run(self.session.close())
|
||||||
|
|
||||||
class MissingCredentials(Exception):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class AuthenticationError(Exception):
|
|
||||||
pass
|
|
||||||
|
|
|
@ -130,14 +130,15 @@ class ArtworkConfig:
|
||||||
# The size of the artwork to embed. Options: thumbnail, small, large, original.
|
# The size of the artwork to embed. Options: thumbnail, small, large, original.
|
||||||
# "original" images can be up to 30MB, and may fail embedding.
|
# "original" images can be up to 30MB, and may fail embedding.
|
||||||
# Using "large" is recommended.
|
# Using "large" is recommended.
|
||||||
size: str
|
embed_size: str
|
||||||
# Both of these options limit the size of the embedded artwork. If their values
|
# Both of these options limit the size of the embedded artwork. If their values
|
||||||
# are larger than the actual dimensions of the image, they will be ignored.
|
# are larger than the actual dimensions of the image, they will be ignored.
|
||||||
# If either value is -1, the image is left untouched.
|
# If either value is -1, the image is left untouched.
|
||||||
max_width: int
|
embed_max_width: int
|
||||||
max_height: int
|
|
||||||
# Save the cover image at the highest quality as a seperate jpg file
|
# Save the cover image at the highest quality as a seperate jpg file
|
||||||
keep_hires_cover: bool
|
save_artwork: bool
|
||||||
|
# If artwork is saved, downscale it to these dimensions, or ignore if -1
|
||||||
|
saved_max_width: int
|
||||||
|
|
||||||
|
|
||||||
@dataclass(slots=True)
|
@dataclass(slots=True)
|
||||||
|
@ -316,5 +317,6 @@ class Config:
|
||||||
self.file.update_toml()
|
self.file.update_toml()
|
||||||
toml_file.write(dumps(self.file.toml))
|
toml_file.write(dumps(self.file.toml))
|
||||||
|
|
||||||
def __del__(self):
|
@classmethod
|
||||||
self.save_file()
|
def defaults(cls):
|
||||||
|
return cls(DEFAULT_CONFIG_PATH)
|
||||||
|
|
|
@ -125,14 +125,16 @@ embed = true
|
||||||
# The size of the artwork to embed. Options: thumbnail, small, large, original.
|
# The size of the artwork to embed. Options: thumbnail, small, large, original.
|
||||||
# "original" images can be up to 30MB, and may fail embedding.
|
# "original" images can be up to 30MB, and may fail embedding.
|
||||||
# Using "large" is recommended.
|
# Using "large" is recommended.
|
||||||
size = "large"
|
embed_size = "large"
|
||||||
# Both of these options limit the size of the embedded artwork. If their values
|
# If this is set to a value > 0, max(width, height) of the embedded art will be set to this value in pixels
|
||||||
# are larger than the actual dimensions of the image, they will be ignored.
|
# Proportions of the image will remain the same
|
||||||
# If either value is -1, the image is left untouched.
|
embed_max_width = -1
|
||||||
max_width = -1
|
|
||||||
max_height = -1
|
|
||||||
# Save the cover image at the highest quality as a seperate jpg file
|
# Save the cover image at the highest quality as a seperate jpg file
|
||||||
keep_hires_cover = true
|
save_artwork = true
|
||||||
|
# If this is set to a value > 0, max(width, height) of the saved art will be set to this value in pixels
|
||||||
|
# Proportions of the image will remain the same
|
||||||
|
saved_max_width = -1
|
||||||
|
|
||||||
|
|
||||||
[metadata]
|
[metadata]
|
||||||
# Sets the value of the 'ALBUM' field in the metadata to the playlist's name.
|
# Sets the value of the 'ALBUM' field in the metadata to the playlist's name.
|
||||||
|
@ -150,7 +152,7 @@ exclude = []
|
||||||
# template
|
# template
|
||||||
add_singles_to_folder = false
|
add_singles_to_folder = false
|
||||||
# Available keys: "albumartist", "title", "year", "bit_depth", "sampling_rate",
|
# Available keys: "albumartist", "title", "year", "bit_depth", "sampling_rate",
|
||||||
# "container", "id", and "albumcomposer"
|
# "id", and "albumcomposer"
|
||||||
folder_format = "{albumartist} - {title} ({year}) [{container}] [{bit_depth}B-{sampling_rate}kHz]"
|
folder_format = "{albumartist} - {title} ({year}) [{container}] [{bit_depth}B-{sampling_rate}kHz]"
|
||||||
# Available keys: "tracknumber", "artist", "albumartist", "composer", "title",
|
# Available keys: "tracknumber", "artist", "albumartist", "composer", "title",
|
||||||
# and "albumcomposer"
|
# and "albumcomposer"
|
||||||
|
|
|
@ -4,9 +4,10 @@ import hashlib
|
||||||
import deezer
|
import deezer
|
||||||
from Cryptodome.Cipher import AES
|
from Cryptodome.Cipher import AES
|
||||||
|
|
||||||
from .client import AuthenticationError, Client, MissingCredentials, NonStreamable
|
from .client import Client
|
||||||
from .config import Config
|
from .config import Config
|
||||||
from .downloadable import DeezerDownloadable
|
from .downloadable import DeezerDownloadable
|
||||||
|
from .exceptions import AuthenticationError, MissingCredentials, NonStreamable
|
||||||
|
|
||||||
|
|
||||||
class DeezerClient(Client):
|
class DeezerClient(Client):
|
||||||
|
@ -120,7 +121,7 @@ class DeezerClient(Client):
|
||||||
)
|
)
|
||||||
|
|
||||||
dl_info["url"] = url
|
dl_info["url"] = url
|
||||||
return DeezerDownloadable(dl_info)
|
return DeezerDownloadable(self.session, dl_info)
|
||||||
|
|
||||||
def _get_encrypted_file_url(
|
def _get_encrypted_file_url(
|
||||||
self, meta_id: str, track_hash: str, media_version: str
|
self, meta_id: str, track_hash: str, media_version: str
|
||||||
|
|
|
@ -10,7 +10,7 @@ import subprocess
|
||||||
import tempfile
|
import tempfile
|
||||||
import time
|
import time
|
||||||
from abc import ABC, abstractmethod
|
from abc import ABC, abstractmethod
|
||||||
from typing import Callable, Optional
|
from typing import Any, Callable, Optional
|
||||||
|
|
||||||
import aiofiles
|
import aiofiles
|
||||||
import aiohttp
|
import aiohttp
|
||||||
|
@ -18,7 +18,7 @@ import m3u8
|
||||||
from Cryptodome.Cipher import Blowfish
|
from Cryptodome.Cipher import Blowfish
|
||||||
|
|
||||||
from . import converter
|
from . import converter
|
||||||
from .client import NonStreamable
|
from .exceptions import NonStreamable
|
||||||
|
|
||||||
|
|
||||||
def generate_temp_path(url: str):
|
def generate_temp_path(url: str):
|
||||||
|
@ -34,7 +34,7 @@ class Downloadable(ABC):
|
||||||
chunk_size = 1024
|
chunk_size = 1024
|
||||||
_size: Optional[int] = None
|
_size: Optional[int] = None
|
||||||
|
|
||||||
async def download(self, path: str, callback: Callable[[int], None]):
|
async def download(self, path: str, callback: Callable[[int], Any]):
|
||||||
tmp = generate_temp_path(self.url)
|
tmp = generate_temp_path(self.url)
|
||||||
await self._download(tmp, callback)
|
await self._download(tmp, callback)
|
||||||
shutil.move(tmp, path)
|
shutil.move(tmp, path)
|
||||||
|
@ -52,15 +52,17 @@ class Downloadable(ABC):
|
||||||
async def _download(self, path: str, callback: Callable[[int], None]):
|
async def _download(self, path: str, callback: Callable[[int], None]):
|
||||||
raise NotImplemented
|
raise NotImplemented
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return f"{self.__class__.__name__}({self.__dict__})"
|
||||||
|
|
||||||
|
|
||||||
class BasicDownloadable(Downloadable):
|
class BasicDownloadable(Downloadable):
|
||||||
"""Just downloads a URL."""
|
"""Just downloads a URL."""
|
||||||
|
|
||||||
def __init__(self, session: aiohttp.ClientSession, url: str):
|
def __init__(self, session: aiohttp.ClientSession, url: str, extension: str):
|
||||||
self.session = session
|
self.session = session
|
||||||
self.url = url
|
self.url = url
|
||||||
# TODO: verify that this is correct
|
self.extension = extension
|
||||||
self.extension = url.split(".")[-1]
|
|
||||||
|
|
||||||
async def _download(self, path: str, callback: Callable[[int], None]):
|
async def _download(self, path: str, callback: Callable[[int], None]):
|
||||||
async with self.session.get(
|
async with self.session.get(
|
||||||
|
@ -173,7 +175,7 @@ class TidalDownloadable(Downloadable):
|
||||||
raise NonStreamable(f"Tidal download: dl_info = {info}")
|
raise NonStreamable(f"Tidal download: dl_info = {info}")
|
||||||
|
|
||||||
assert isinstance(url, str)
|
assert isinstance(url, str)
|
||||||
self.downloadable = BasicDownloadable(session, url)
|
self.downloadable = BasicDownloadable(session, url, "m4a")
|
||||||
|
|
||||||
async def _download(self, path: str, callback):
|
async def _download(self, path: str, callback):
|
||||||
await self.downloadable._download(path, callback)
|
await self.downloadable._download(path, callback)
|
||||||
|
@ -198,7 +200,7 @@ class SoundcloudDownloadable(Downloadable):
|
||||||
await self._download_original(path, callback)
|
await self._download_original(path, callback)
|
||||||
|
|
||||||
async def _download_original(self, path: str, callback):
|
async def _download_original(self, path: str, callback):
|
||||||
downloader = BasicDownloadable(self.session, self.url)
|
downloader = BasicDownloadable(self.session, self.url, "flac")
|
||||||
await downloader.download(path, callback)
|
await downloader.download(path, callback)
|
||||||
engine = converter.FLAC(path)
|
engine = converter.FLAC(path)
|
||||||
engine.convert(path)
|
engine.convert(path)
|
||||||
|
|
|
@ -4,23 +4,20 @@ from __future__ import annotations
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
import re
|
import re
|
||||||
from collections import OrderedDict
|
|
||||||
from dataclasses import dataclass
|
from dataclasses import dataclass
|
||||||
from string import Formatter
|
from string import Formatter
|
||||||
from typing import Generator, Hashable, Iterable, Optional, Type, Union
|
from typing import Optional, Type, TypeVar
|
||||||
|
|
||||||
from .constants import (
|
# from .constants import (
|
||||||
ALBUM_KEYS,
|
# ALBUM_KEYS,
|
||||||
COPYRIGHT,
|
# COPYRIGHT,
|
||||||
FLAC_KEY,
|
# FLAC_KEY,
|
||||||
MP3_KEY,
|
# MP3_KEY,
|
||||||
MP4_KEY,
|
# MP4_KEY,
|
||||||
PHON_COPYRIGHT,
|
# PHON_COPYRIGHT,
|
||||||
TIDAL_Q_MAP,
|
# TIDAL_Q_MAP,
|
||||||
TRACK_KEYS,
|
# TRACK_KEYS,
|
||||||
)
|
# )
|
||||||
from .exceptions import InvalidContainerError, InvalidSourceError
|
|
||||||
from .utils import get_cover_urls, get_quality_id
|
|
||||||
|
|
||||||
logger = logging.getLogger("streamrip")
|
logger = logging.getLogger("streamrip")
|
||||||
|
|
||||||
|
@ -32,16 +29,85 @@ def get_album_track_ids(source: str, resp) -> list[str]:
|
||||||
return [track["id"] for track in tracklist]
|
return [track["id"] for track in tracklist]
|
||||||
|
|
||||||
|
|
||||||
|
# (url to cover, downloaded path of cover)
|
||||||
@dataclass(slots=True)
|
@dataclass(slots=True)
|
||||||
class CoverUrls:
|
class Covers:
|
||||||
thumbnail: Optional[str]
|
CoverEntry = tuple[str | None, str | None]
|
||||||
small: Optional[str]
|
thumbnail: CoverEntry
|
||||||
large: Optional[str]
|
small: CoverEntry
|
||||||
original: Optional[str]
|
large: CoverEntry
|
||||||
|
original: CoverEntry
|
||||||
|
|
||||||
def largest(self) -> Optional[str]:
|
def empty(self) -> bool:
|
||||||
# Return first non-None item
|
return all(
|
||||||
return self.original or self.large or self.small or self.thumbnail
|
url is None
|
||||||
|
for url, _ in (self.original, self.large, self.small, self.thumbnail)
|
||||||
|
)
|
||||||
|
|
||||||
|
def largest(self) -> CoverEntry:
|
||||||
|
# Return first item with url
|
||||||
|
if self.original[0]:
|
||||||
|
return self.original
|
||||||
|
|
||||||
|
if self.large[0]:
|
||||||
|
return self.large
|
||||||
|
|
||||||
|
if self.small[0]:
|
||||||
|
return self.small
|
||||||
|
|
||||||
|
if self.thumbnail[0]:
|
||||||
|
return self.thumbnail
|
||||||
|
|
||||||
|
raise Exception("No covers found")
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_qobuz(cls, resp):
|
||||||
|
cover_urls = {k: (v, None) for k, v in resp["image"].items()}
|
||||||
|
cover_urls["original"] = ("org".join(cover_urls["large"].rsplit("600", 1)), None) # type: ignore
|
||||||
|
return cls(**cover_urls) # type: ignore
|
||||||
|
|
||||||
|
def get_size(self, size: str) -> CoverEntry:
|
||||||
|
"""Get the cover size, or the largest cover smaller than `size`.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
size (str):
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
CoverEntry
|
||||||
|
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
Exception: If a suitable cover doesn't exist
|
||||||
|
|
||||||
|
"""
|
||||||
|
fallback = False
|
||||||
|
if size == "original":
|
||||||
|
if self.original[0] is not None:
|
||||||
|
return self.original
|
||||||
|
else:
|
||||||
|
fallback = True
|
||||||
|
|
||||||
|
if fallback or size == "large":
|
||||||
|
if self.large[0] is not None:
|
||||||
|
return self.large
|
||||||
|
else:
|
||||||
|
fallback = True
|
||||||
|
|
||||||
|
if fallback or size == "small":
|
||||||
|
if self.small[0] is not None:
|
||||||
|
return self.small
|
||||||
|
else:
|
||||||
|
fallback = True
|
||||||
|
|
||||||
|
# At this point, either size == 'thumbnail' or nothing else was found
|
||||||
|
if self.thumbnail[0] is None:
|
||||||
|
raise Exception(f"No covers found for {size = }. Covers: {self}")
|
||||||
|
|
||||||
|
return self.thumbnail
|
||||||
|
|
||||||
|
|
||||||
|
COPYRIGHT = "\u2117"
|
||||||
|
PHON_COPYRIGHT = "\u00a9"
|
||||||
|
|
||||||
|
|
||||||
@dataclass(slots=True)
|
@dataclass(slots=True)
|
||||||
|
@ -57,7 +123,31 @@ class TrackMetadata:
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def from_qobuz(cls, album: AlbumMetadata, resp) -> TrackMetadata:
|
def from_qobuz(cls, album: AlbumMetadata, resp) -> TrackMetadata:
|
||||||
raise NotImplemented
|
title = typed(resp["title"].strip(), str)
|
||||||
|
|
||||||
|
version = resp.get("version")
|
||||||
|
work = resp.get("work")
|
||||||
|
if version is not None and version not in title:
|
||||||
|
title = f"{title} ({version})"
|
||||||
|
if work is not None and work not in title:
|
||||||
|
title = f"{work}: {title}"
|
||||||
|
|
||||||
|
composer = typed(resp.get("composer", {}).get("name"), str | None)
|
||||||
|
tracknumber = typed(resp.get("track_number", 1), int)
|
||||||
|
discnumber = typed(resp.get("media_number", 1), int)
|
||||||
|
artist = typed(safe_get(resp, "performer", "name"), str)
|
||||||
|
track_id = typed(resp["id"], str)
|
||||||
|
|
||||||
|
info = TrackInfo(id=track_id, quality=album.info.quality)
|
||||||
|
return cls(
|
||||||
|
info=info,
|
||||||
|
title=title,
|
||||||
|
album=album,
|
||||||
|
artist=artist,
|
||||||
|
tracknumber=tracknumber,
|
||||||
|
discnumber=discnumber,
|
||||||
|
composer=composer,
|
||||||
|
)
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def from_deezer(cls, album: AlbumMetadata, resp) -> TrackMetadata:
|
def from_deezer(cls, album: AlbumMetadata, resp) -> TrackMetadata:
|
||||||
|
@ -83,8 +173,18 @@ class TrackMetadata:
|
||||||
return cls.from_deezer(album, resp)
|
return cls.from_deezer(album, resp)
|
||||||
raise Exception
|
raise Exception
|
||||||
|
|
||||||
def format_track_path(self, formatter: str):
|
def format_track_path(self, formatter: str) -> str:
|
||||||
pass
|
# Available keys: "tracknumber", "artist", "albumartist", "composer", "title",
|
||||||
|
# and "albumcomposer"
|
||||||
|
info = {
|
||||||
|
"title": self.title,
|
||||||
|
"tracknumber": self.tracknumber,
|
||||||
|
"artist": self.artist,
|
||||||
|
"albumartist": self.album.albumartist,
|
||||||
|
"albumcomposer": self.album.albumcomposer or "None",
|
||||||
|
"composer": self.composer or "None",
|
||||||
|
}
|
||||||
|
return formatter.format(**info)
|
||||||
|
|
||||||
|
|
||||||
@dataclass(slots=True)
|
@dataclass(slots=True)
|
||||||
|
@ -107,13 +207,12 @@ class AlbumMetadata:
|
||||||
albumartist: str
|
albumartist: str
|
||||||
year: str
|
year: str
|
||||||
genre: list[str]
|
genre: list[str]
|
||||||
covers: CoverUrls
|
covers: Covers
|
||||||
|
|
||||||
albumcomposer: Optional[str] = None
|
albumcomposer: Optional[str] = None
|
||||||
comment: Optional[str] = None
|
comment: Optional[str] = None
|
||||||
compilation: Optional[str] = None
|
compilation: Optional[str] = None
|
||||||
copyright: Optional[str] = None
|
copyright: Optional[str] = None
|
||||||
cover: Optional[str] = None
|
|
||||||
date: Optional[str] = None
|
date: Optional[str] = None
|
||||||
description: Optional[str] = None
|
description: Optional[str] = None
|
||||||
disctotal: Optional[int] = None
|
disctotal: Optional[int] = None
|
||||||
|
@ -123,6 +222,20 @@ class AlbumMetadata:
|
||||||
purchase_date: Optional[str] = None
|
purchase_date: Optional[str] = None
|
||||||
tracktotal: Optional[int] = None
|
tracktotal: Optional[int] = None
|
||||||
|
|
||||||
|
def format_folder_path(self, formatter: str) -> str:
|
||||||
|
# Available keys: "albumartist", "title", "year", "bit_depth", "sampling_rate",
|
||||||
|
# "id", and "albumcomposer"
|
||||||
|
info = {
|
||||||
|
"albumartist": self.albumartist,
|
||||||
|
"albumcomposer": self.albumcomposer or "None",
|
||||||
|
"bit_depth": self.info.bit_depth,
|
||||||
|
"id": self.info.id,
|
||||||
|
"sampling_rate": self.info.sampling_rate,
|
||||||
|
"title": self.album,
|
||||||
|
"year": self.year,
|
||||||
|
}
|
||||||
|
return formatter.format(**info)
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def from_qobuz(cls, resp) -> AlbumMetadata:
|
def from_qobuz(cls, resp) -> AlbumMetadata:
|
||||||
album = resp.get("title", "Unknown Album")
|
album = resp.get("title", "Unknown Album")
|
||||||
|
@ -131,77 +244,84 @@ class AlbumMetadata:
|
||||||
genres = list(set(re.findall(r"([^\u2192\/]+)", "/".join(genre))))
|
genres = list(set(re.findall(r"([^\u2192\/]+)", "/".join(genre))))
|
||||||
date = resp.get("release_date_original") or resp.get("release_date")
|
date = resp.get("release_date_original") or resp.get("release_date")
|
||||||
year = date[:4]
|
year = date[:4]
|
||||||
copyright = resp.get("copyright")
|
|
||||||
|
_copyright = resp.get("copyright")
|
||||||
|
_copyright = re.sub(r"(?i)\(P\)", PHON_COPYRIGHT, _copyright)
|
||||||
|
_copyright = re.sub(r"(?i)\(C\)", COPYRIGHT, _copyright)
|
||||||
|
|
||||||
if artists := resp.get("artists"):
|
if artists := resp.get("artists"):
|
||||||
albumartist = ", ".join(a["name"] for a in artists)
|
albumartist = ", ".join(a["name"] for a in artists)
|
||||||
else:
|
else:
|
||||||
albumartist = safe_get(resp, "artist", "name")
|
albumartist = typed(safe_get(resp, "artist", "name"), str)
|
||||||
|
|
||||||
albumcomposer = safe_get(resp, "composer", "name")
|
albumcomposer = typed(safe_get(resp, "composer", "name"), str | None)
|
||||||
label = resp.get("label")
|
_label = resp.get("label")
|
||||||
description = resp.get("description")
|
if isinstance(_label, dict):
|
||||||
disctotal = (
|
_label = _label["name"]
|
||||||
|
label = typed(_label, str | None)
|
||||||
|
description = typed(resp.get("description"), str | None)
|
||||||
|
disctotal = typed(
|
||||||
max(
|
max(
|
||||||
track.get("media_number", 1)
|
track.get("media_number", 1)
|
||||||
for track in safe_get(resp, "tracks", "items", default=[{}])
|
for track in safe_get(resp, "tracks", "items", default=[{}]) # type: ignore
|
||||||
)
|
)
|
||||||
or 1
|
or 1,
|
||||||
|
int,
|
||||||
)
|
)
|
||||||
explicit = resp.get("parental_warning", False)
|
explicit = typed(resp.get("parental_warning", False), bool)
|
||||||
|
|
||||||
if isinstance(label, dict):
|
|
||||||
label = self.label.get("name")
|
|
||||||
|
|
||||||
# Non-embedded information
|
# Non-embedded information
|
||||||
version = resp.get("version")
|
# version = resp.get("version")
|
||||||
cover_urls = CoverUrls.from_qobuz(resp)
|
cover_urls = Covers.from_qobuz(resp)
|
||||||
streamable = resp.get("streamable", False)
|
streamable = typed(resp.get("streamable", False), bool)
|
||||||
bit_depth = resp.get("maximum_bit_depth")
|
assert streamable
|
||||||
sampling_rate = resp.get("maximum_sampling_rate")
|
bit_depth = typed(resp.get("maximum_bit_depth"), int | None)
|
||||||
quality = get_quality_id(self.bit_depth, self.sampling_rate)
|
sampling_rate = typed(resp.get("maximum_sampling_rate"), int | None)
|
||||||
|
quality = get_quality_id(bit_depth, sampling_rate)
|
||||||
booklets = resp.get("goodies")
|
booklets = resp.get("goodies")
|
||||||
item_id = resp.get("id")
|
item_id = resp.get("id")
|
||||||
|
|
||||||
if sampling_rate is not None:
|
if sampling_rate is not None:
|
||||||
sampling_rate *= 1000
|
sampling_rate *= 1000
|
||||||
|
|
||||||
info = AlbumInfo(item_id, quality, explicit, sampling_rate, bit_depth, booklets)
|
info = AlbumInfo(
|
||||||
|
item_id, quality, label, explicit, sampling_rate, bit_depth, booklets
|
||||||
|
)
|
||||||
return AlbumMetadata(
|
return AlbumMetadata(
|
||||||
|
info,
|
||||||
album,
|
album,
|
||||||
albumartist,
|
albumartist,
|
||||||
year,
|
year,
|
||||||
genre=genres,
|
genre=genres,
|
||||||
covers=cover_urls,
|
covers=cover_urls,
|
||||||
albumcomposer,
|
albumcomposer=albumcomposer,
|
||||||
comment,
|
comment=None,
|
||||||
compilation,
|
compilation=None,
|
||||||
copyright(),
|
copyright=_copyright,
|
||||||
cover,
|
date=date,
|
||||||
date,
|
description=description,
|
||||||
description,
|
disctotal=disctotal,
|
||||||
disctotal,
|
encoder=None,
|
||||||
encoder,
|
grouping=None,
|
||||||
grouping,
|
lyrics=None,
|
||||||
lyrics,
|
purchase_date=None,
|
||||||
purchase_date,
|
tracktotal=tracktotal,
|
||||||
tracktotal,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def from_deezer(cls, resp) -> AlbumMetadata:
|
def from_deezer(cls, resp) -> AlbumMetadata:
|
||||||
raise NotImplemented
|
raise NotImplementedError
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def from_soundcloud(cls, resp) -> AlbumMetadata:
|
def from_soundcloud(cls, resp) -> AlbumMetadata:
|
||||||
raise NotImplemented
|
raise NotImplementedError
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def from_tidal(cls, resp) -> AlbumMetadata:
|
def from_tidal(cls, resp) -> AlbumMetadata:
|
||||||
raise NotImplemented
|
raise NotImplementedError
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def from_resp(cls, source, resp) -> AlbumMetadata:
|
def from_resp(cls, resp, source) -> AlbumMetadata:
|
||||||
if source == "qobuz":
|
if source == "qobuz":
|
||||||
return cls.from_qobuz(resp)
|
return cls.from_qobuz(resp)
|
||||||
if source == "tidal":
|
if source == "tidal":
|
||||||
|
@ -210,13 +330,14 @@ class AlbumMetadata:
|
||||||
return cls.from_soundcloud(resp)
|
return cls.from_soundcloud(resp)
|
||||||
if source == "deezer":
|
if source == "deezer":
|
||||||
return cls.from_deezer(resp)
|
return cls.from_deezer(resp)
|
||||||
raise Exception
|
raise Exception("Invalid source")
|
||||||
|
|
||||||
|
|
||||||
@dataclass(slots=True)
|
@dataclass(slots=True)
|
||||||
class AlbumInfo:
|
class AlbumInfo:
|
||||||
id: str
|
id: str
|
||||||
quality: int
|
quality: int
|
||||||
|
label: Optional[str] = None
|
||||||
explicit: bool = False
|
explicit: bool = False
|
||||||
sampling_rate: Optional[int] = None
|
sampling_rate: Optional[int] = None
|
||||||
bit_depth: Optional[int] = None
|
bit_depth: Optional[int] = None
|
||||||
|
@ -232,7 +353,7 @@ def keys_in_format_string(s: str):
|
||||||
return [f[1] for f in _formatter.parse(s) if f[1] is not None]
|
return [f[1] for f in _formatter.parse(s) if f[1] is not None]
|
||||||
|
|
||||||
|
|
||||||
def safe_get(d: dict, *keys, default=None):
|
def safe_get(d: dict, *keys, default=None) -> dict | str | int | list | None:
|
||||||
"""Nested __getitem__ calls with a default value.
|
"""Nested __getitem__ calls with a default value.
|
||||||
|
|
||||||
Use to avoid key not found errors.
|
Use to avoid key not found errors.
|
||||||
|
@ -243,3 +364,35 @@ def safe_get(d: dict, *keys, default=None):
|
||||||
if _d == {}:
|
if _d == {}:
|
||||||
return default
|
return default
|
||||||
return _d
|
return _d
|
||||||
|
|
||||||
|
|
||||||
|
T = TypeVar("T")
|
||||||
|
|
||||||
|
|
||||||
|
def typed(thing, expected_type: Type[T]) -> T:
|
||||||
|
assert isinstance(thing, expected_type)
|
||||||
|
return thing
|
||||||
|
|
||||||
|
|
||||||
|
def get_quality_id(bit_depth: Optional[int], sampling_rate: Optional[int]) -> int:
|
||||||
|
"""Get the universal quality id from bit depth and sampling rate.
|
||||||
|
|
||||||
|
:param bit_depth:
|
||||||
|
:type bit_depth: Optional[int]
|
||||||
|
:param sampling_rate: In kHz
|
||||||
|
:type sampling_rate: Optional[int]
|
||||||
|
"""
|
||||||
|
# XXX: Should `0` quality be supported?
|
||||||
|
if bit_depth is None or sampling_rate is None: # is lossy
|
||||||
|
return 1
|
||||||
|
|
||||||
|
if bit_depth == 16:
|
||||||
|
return 2
|
||||||
|
|
||||||
|
if bit_depth == 24:
|
||||||
|
if sampling_rate <= 96:
|
||||||
|
return 3
|
||||||
|
|
||||||
|
return 4
|
||||||
|
|
||||||
|
raise Exception(f"Invalid {bit_depth = }")
|
||||||
|
|
|
@ -1,7 +1,9 @@
|
||||||
from typing import Optional
|
from typing import Optional
|
||||||
|
|
||||||
from click import style
|
from click import style
|
||||||
from tqdm import tqdm
|
from tqdm.asyncio import tqdm
|
||||||
|
|
||||||
|
from .config import Config
|
||||||
|
|
||||||
THEMES = {
|
THEMES = {
|
||||||
"plain": None,
|
"plain": None,
|
||||||
|
@ -14,8 +16,8 @@ THEMES = {
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
def get_progress_bar(total, theme="dainty", desc: Optional[str] = None, unit="B"):
|
def get_progress_bar(config: Config, total: int, desc: Optional[str], unit="B"):
|
||||||
theme = THEMES[theme]
|
theme = THEMES[config.session.theme.progress_bar]
|
||||||
return tqdm(
|
return tqdm(
|
||||||
total=total,
|
total=total,
|
||||||
unit=unit,
|
unit=unit,
|
||||||
|
|
|
@ -5,9 +5,10 @@ from getpass import getpass
|
||||||
|
|
||||||
from click import launch, secho, style
|
from click import launch, secho, style
|
||||||
|
|
||||||
from .client import AuthenticationError, Client, MissingCredentials
|
from .client import Client
|
||||||
from .config import Config
|
from .config import Config
|
||||||
from .deezer_client import DeezerClient
|
from .deezer_client import DeezerClient
|
||||||
|
from .exceptions import AuthenticationError, MissingCredentials
|
||||||
from .qobuz_client import QobuzClient
|
from .qobuz_client import QobuzClient
|
||||||
from .tidal_client import TidalClient
|
from .tidal_client import TidalClient
|
||||||
|
|
||||||
|
|
|
@ -5,10 +5,7 @@ import re
|
||||||
import time
|
import time
|
||||||
from typing import AsyncGenerator, Optional
|
from typing import AsyncGenerator, Optional
|
||||||
|
|
||||||
import aiohttp
|
from .client import Client
|
||||||
from aiolimiter import AsyncLimiter
|
|
||||||
|
|
||||||
from .client import DEFAULT_USER_AGENT, Client
|
|
||||||
from .config import Config
|
from .config import Config
|
||||||
from .downloadable import BasicDownloadable, Downloadable
|
from .downloadable import BasicDownloadable, Downloadable
|
||||||
from .exceptions import (
|
from .exceptions import (
|
||||||
|
@ -51,13 +48,13 @@ class QobuzClient(Client):
|
||||||
def __init__(self, config: Config):
|
def __init__(self, config: Config):
|
||||||
self.logged_in = False
|
self.logged_in = False
|
||||||
self.config = config
|
self.config = config
|
||||||
self.session = self.get_session()
|
|
||||||
self.rate_limiter = self.get_rate_limiter(
|
self.rate_limiter = self.get_rate_limiter(
|
||||||
config.session.downloads.requests_per_minute
|
config.session.downloads.requests_per_minute
|
||||||
)
|
)
|
||||||
self.secret: Optional[str] = None
|
self.secret: Optional[str] = None
|
||||||
|
|
||||||
async def login(self):
|
async def login(self):
|
||||||
|
self.session = await self.get_session()
|
||||||
c = self.config.session.qobuz
|
c = self.config.session.qobuz
|
||||||
if not c.email_or_userid or not c.password_or_token:
|
if not c.email_or_userid or not c.password_or_token:
|
||||||
raise MissingCredentials
|
raise MissingCredentials
|
||||||
|
@ -65,11 +62,13 @@ class QobuzClient(Client):
|
||||||
assert not self.logged_in, "Already logged in"
|
assert not self.logged_in, "Already logged in"
|
||||||
|
|
||||||
if not c.app_id or not c.secrets:
|
if not c.app_id or not c.secrets:
|
||||||
|
logger.info("App id/secrets not found, fetching")
|
||||||
c.app_id, c.secrets = await self._get_app_id_and_secrets()
|
c.app_id, c.secrets = await self._get_app_id_and_secrets()
|
||||||
# write to file
|
# write to file
|
||||||
self.config.file.qobuz.app_id = c.app_id
|
self.config.file.qobuz.app_id = c.app_id
|
||||||
self.config.file.qobuz.secrets = c.secrets
|
self.config.file.qobuz.secrets = c.secrets
|
||||||
self.config.file.set_modified()
|
self.config.file.set_modified()
|
||||||
|
logger.debug(f"Found {c.app_id = } {c.secrets = }")
|
||||||
|
|
||||||
self.session.headers.update({"X-App-Id": c.app_id})
|
self.session.headers.update({"X-App-Id": c.app_id})
|
||||||
self.secret = await self._get_valid_secret(c.secrets)
|
self.secret = await self._get_valid_secret(c.secrets)
|
||||||
|
@ -87,22 +86,21 @@ class QobuzClient(Client):
|
||||||
"app_id": c.app_id,
|
"app_id": c.app_id,
|
||||||
}
|
}
|
||||||
|
|
||||||
resp = await self._api_request("user/login", params)
|
logger.debug("Request params %s", params)
|
||||||
|
status, resp = await self._api_request("user/login", params)
|
||||||
|
logger.debug("Login resp: %s", resp)
|
||||||
|
|
||||||
if resp.status == 401:
|
if status == 401:
|
||||||
raise AuthenticationError(f"Invalid credentials from params {params}")
|
raise AuthenticationError(f"Invalid credentials from params {params}")
|
||||||
elif resp.status == 400:
|
elif status == 400:
|
||||||
logger.debug(resp)
|
|
||||||
raise InvalidAppIdError(f"Invalid app id from params {params}")
|
raise InvalidAppIdError(f"Invalid app id from params {params}")
|
||||||
|
|
||||||
logger.info("Logged in to Qobuz")
|
logger.info("Logged in to Qobuz")
|
||||||
|
|
||||||
resp_json = await resp.json()
|
if not resp["user"]["credential"]["parameters"]:
|
||||||
|
|
||||||
if not resp_json["user"]["credential"]["parameters"]:
|
|
||||||
raise IneligibleError("Free accounts are not eligible to download tracks.")
|
raise IneligibleError("Free accounts are not eligible to download tracks.")
|
||||||
|
|
||||||
uat = resp_json["user_auth_token"]
|
uat = resp["user_auth_token"]
|
||||||
self.session.headers.update({"X-User-Auth-Token": uat})
|
self.session.headers.update({"X-User-Auth-Token": uat})
|
||||||
# label = resp_json["user"]["credential"]["parameters"]["short_label"]
|
# label = resp_json["user"]["credential"]["parameters"]["short_label"]
|
||||||
|
|
||||||
|
@ -131,20 +129,19 @@ class QobuzClient(Client):
|
||||||
|
|
||||||
epoint = f"{media_type}/get"
|
epoint = f"{media_type}/get"
|
||||||
|
|
||||||
response = await self._api_request(epoint, params)
|
status, resp = await self._api_request(epoint, params)
|
||||||
resp_json = await response.json()
|
|
||||||
|
|
||||||
if response.status != 200:
|
if status != 200:
|
||||||
raise Exception(f'Error fetching metadata. "{resp_json["message"]}"')
|
raise Exception(f'Error fetching metadata. "{resp["message"]}"')
|
||||||
|
|
||||||
return resp_json
|
return resp
|
||||||
|
|
||||||
async def search(
|
async def search(
|
||||||
self, query: str, media_type: str, limit: int = 500
|
self, query: str, media_type: str, limit: int = 500
|
||||||
) -> AsyncGenerator:
|
) -> AsyncGenerator:
|
||||||
params = {
|
params = {
|
||||||
"query": query,
|
"query": query,
|
||||||
"limit": limit,
|
# "limit": limit,
|
||||||
}
|
}
|
||||||
# TODO: move featured, favorites, and playlists into _api_get later
|
# TODO: move featured, favorites, and playlists into _api_get later
|
||||||
if media_type == "featured":
|
if media_type == "featured":
|
||||||
|
@ -164,13 +161,15 @@ class QobuzClient(Client):
|
||||||
else:
|
else:
|
||||||
epoint = f"{media_type}/search"
|
epoint = f"{media_type}/search"
|
||||||
|
|
||||||
return self._paginate(epoint, params)
|
async for status, resp in self._paginate(epoint, params, limit=limit):
|
||||||
|
assert status == 200
|
||||||
|
yield resp
|
||||||
|
|
||||||
async def get_downloadable(self, item_id: str, quality: int) -> Downloadable:
|
async def get_downloadable(self, item_id: str, quality: int) -> Downloadable:
|
||||||
assert self.secret is not None and self.logged_in and 1 <= quality <= 4
|
assert self.secret is not None and self.logged_in and 1 <= quality <= 4
|
||||||
|
|
||||||
resp = await self._request_file_url(item_id, quality, self.secret)
|
status, resp_json = await self._request_file_url(item_id, quality, self.secret)
|
||||||
resp_json = await resp.json()
|
assert status == 200
|
||||||
stream_url = resp_json.get("url")
|
stream_url = resp_json.get("url")
|
||||||
|
|
||||||
if stream_url is None:
|
if stream_url is None:
|
||||||
|
@ -183,32 +182,51 @@ class QobuzClient(Client):
|
||||||
)
|
)
|
||||||
raise NonStreamable
|
raise NonStreamable
|
||||||
|
|
||||||
return BasicDownloadable(self.session, stream_url)
|
return BasicDownloadable(
|
||||||
|
self.session, stream_url, "flac" if quality > 1 else "mp3"
|
||||||
|
)
|
||||||
|
|
||||||
async def _paginate(self, epoint: str, params: dict) -> AsyncGenerator[dict, None]:
|
async def _paginate(
|
||||||
response = await self._api_request(epoint, params)
|
self, epoint: str, params: dict, limit: Optional[int] = None
|
||||||
page = await response.json()
|
) -> AsyncGenerator[tuple[int, dict], None]:
|
||||||
logger.debug("Keys returned from _gen_pages: %s", ", ".join(page.keys()))
|
"""Paginate search results.
|
||||||
|
|
||||||
|
params:
|
||||||
|
limit: If None, all the results are yielded. Otherwise a maximum
|
||||||
|
of `limit` results are yielded.
|
||||||
|
|
||||||
|
returns:
|
||||||
|
Generator that yields (status code, response) tuples
|
||||||
|
"""
|
||||||
|
params.update({"limit": limit or 500})
|
||||||
|
status, page = await self._api_request(epoint, params)
|
||||||
|
logger.debug("paginate: initial request made with status %d", status)
|
||||||
|
# albums, tracks, etc.
|
||||||
key = epoint.split("/")[0] + "s"
|
key = epoint.split("/")[0] + "s"
|
||||||
total = page.get(key, {})
|
items = page.get(key, {})
|
||||||
total = total.get("total") or total.get("items")
|
total = items.get("total", 0) or items.get("items", 0)
|
||||||
|
if limit is not None and limit < total:
|
||||||
|
total = limit
|
||||||
|
|
||||||
|
logger.debug("paginate: %d total items requested", total)
|
||||||
|
|
||||||
if not total:
|
if not total:
|
||||||
logger.debug("Nothing found from %s epoint", epoint)
|
logger.debug("Nothing found from %s epoint", epoint)
|
||||||
return
|
return
|
||||||
|
|
||||||
limit = page.get(key, {}).get("limit", 500)
|
limit = int(page.get(key, {}).get("limit", 500))
|
||||||
offset = page.get(key, {}).get("offset", 0)
|
offset = int(page.get(key, {}).get("offset", 0))
|
||||||
|
|
||||||
|
logger.debug("paginate: from response: limit=%d, offset=%d", limit, offset)
|
||||||
params.update({"limit": limit})
|
params.update({"limit": limit})
|
||||||
yield page
|
yield status, page
|
||||||
while (offset + limit) < total:
|
while (offset + limit) < total:
|
||||||
offset += limit
|
offset += limit
|
||||||
params.update({"offset": offset})
|
params.update({"offset": offset})
|
||||||
response = await self._api_request(epoint, params)
|
yield await self._api_request(epoint, params)
|
||||||
yield await response.json()
|
|
||||||
|
|
||||||
async def _get_app_id_and_secrets(self) -> tuple[str, list[str]]:
|
async def _get_app_id_and_secrets(self) -> tuple[str, list[str]]:
|
||||||
spoofer = QobuzSpoofer()
|
async with QobuzSpoofer() as spoofer:
|
||||||
return await spoofer.get_app_id_and_secrets()
|
return await spoofer.get_app_id_and_secrets()
|
||||||
|
|
||||||
async def _get_valid_secret(self, secrets: list[str]) -> str:
|
async def _get_valid_secret(self, secrets: list[str]) -> str:
|
||||||
|
@ -223,15 +241,18 @@ class QobuzClient(Client):
|
||||||
return working_secrets[0]
|
return working_secrets[0]
|
||||||
|
|
||||||
async def _test_secret(self, secret: str) -> Optional[str]:
|
async def _test_secret(self, secret: str) -> Optional[str]:
|
||||||
resp = await self._request_file_url("19512574", 1, secret)
|
status, _ = await self._request_file_url("19512574", 4, secret)
|
||||||
if resp.status == 400:
|
if status == 400:
|
||||||
return None
|
return None
|
||||||
resp.raise_for_status()
|
if status == 200:
|
||||||
return secret
|
return secret
|
||||||
|
logger.warning("Got status %d when testing secret", status)
|
||||||
|
return None
|
||||||
|
|
||||||
async def _request_file_url(
|
async def _request_file_url(
|
||||||
self, track_id: str, quality: int, secret: str
|
self, track_id: str, quality: int, secret: str
|
||||||
) -> aiohttp.ClientResponse:
|
) -> tuple[int, dict]:
|
||||||
|
quality = self.get_quality(quality)
|
||||||
unix_ts = time.time()
|
unix_ts = time.time()
|
||||||
r_sig = f"trackgetFileUrlformat_id{quality}intentstreamtrack_id{track_id}{unix_ts}{secret}"
|
r_sig = f"trackgetFileUrlformat_id{quality}intentstreamtrack_id{track_id}{unix_ts}{secret}"
|
||||||
logger.debug("Raw request signature: %s", r_sig)
|
logger.debug("Raw request signature: %s", r_sig)
|
||||||
|
@ -246,11 +267,24 @@ class QobuzClient(Client):
|
||||||
}
|
}
|
||||||
return await self._api_request("track/getFileUrl", params)
|
return await self._api_request("track/getFileUrl", params)
|
||||||
|
|
||||||
async def _api_request(self, epoint: str, params: dict) -> aiohttp.ClientResponse:
|
async def _api_request(self, epoint: str, params: dict) -> tuple[int, dict]:
|
||||||
|
"""Make a request to the API.
|
||||||
|
returns: status code, json parsed response
|
||||||
|
"""
|
||||||
url = f"{QOBUZ_BASE_URL}/{epoint}"
|
url = f"{QOBUZ_BASE_URL}/{epoint}"
|
||||||
|
logger.debug("api_request: endpoint=%s, params=%s", epoint, params)
|
||||||
if self.rate_limiter is not None:
|
if self.rate_limiter is not None:
|
||||||
async with self.rate_limiter:
|
async with self.rate_limiter:
|
||||||
|
async with self.session.get(
|
||||||
|
url, params=params, encoding="utf-8"
|
||||||
|
) as response:
|
||||||
|
return response.status, await response.json()
|
||||||
|
# return await self.session.get(url, params=params)
|
||||||
async with self.session.get(url, params=params) as response:
|
async with self.session.get(url, params=params) as response:
|
||||||
return response
|
resp_json = await response.json()
|
||||||
async with self.session.get(url, params=params) as response:
|
return response.status, resp_json
|
||||||
return response
|
|
||||||
|
@staticmethod
|
||||||
|
def get_quality(quality: int):
|
||||||
|
quality_map = (5, 6, 7, 27)
|
||||||
|
return quality_map[quality - 1]
|
||||||
|
|
|
@ -28,9 +28,10 @@ class QobuzSpoofer:
|
||||||
self.app_id_regex = (
|
self.app_id_regex = (
|
||||||
r'production:{api:{appId:"(?P<app_id>\d{9})",appSecret:"(\w{32})'
|
r'production:{api:{appId:"(?P<app_id>\d{9})",appSecret:"(\w{32})'
|
||||||
)
|
)
|
||||||
self.session = aiohttp.ClientSession()
|
self.session = None
|
||||||
|
|
||||||
async def get_app_id_and_secrets(self) -> tuple[str, list[str]]:
|
async def get_app_id_and_secrets(self) -> tuple[str, list[str]]:
|
||||||
|
assert self.session is not None
|
||||||
async with self.session.get("https://play.qobuz.com/login") as req:
|
async with self.session.get("https://play.qobuz.com/login") as req:
|
||||||
login_page = await req.text()
|
login_page = await req.text()
|
||||||
|
|
||||||
|
@ -88,3 +89,12 @@ class QobuzSpoofer:
|
||||||
secrets_list = vals
|
secrets_list = vals
|
||||||
|
|
||||||
return app_id, secrets_list
|
return app_id, secrets_list
|
||||||
|
|
||||||
|
async def __aenter__(self):
|
||||||
|
self.session = aiohttp.ClientSession()
|
||||||
|
return self
|
||||||
|
|
||||||
|
async def __aexit__(self, *_):
|
||||||
|
if self.session is not None:
|
||||||
|
await self.session.close()
|
||||||
|
self.session = None
|
||||||
|
|
|
@ -15,12 +15,12 @@ class SoundcloudClient(Client):
|
||||||
def __init__(self, config: Config):
|
def __init__(self, config: Config):
|
||||||
self.global_config = config
|
self.global_config = config
|
||||||
self.config = config.session.soundcloud
|
self.config = config.session.soundcloud
|
||||||
self.session = self.get_session()
|
|
||||||
self.rate_limiter = self.get_rate_limiter(
|
self.rate_limiter = self.get_rate_limiter(
|
||||||
config.session.downloads.requests_per_minute
|
config.session.downloads.requests_per_minute
|
||||||
)
|
)
|
||||||
|
|
||||||
async def login(self):
|
async def login(self):
|
||||||
|
self.session = await self.get_session()
|
||||||
client_id, app_version = self.config.client_id, self.config.app_version
|
client_id, app_version = self.config.client_id, self.config.app_version
|
||||||
if not client_id or not app_version or not self._announce():
|
if not client_id or not app_version or not self._announce():
|
||||||
client_id, app_version = await self._refresh_tokens()
|
client_id, app_version = await self._refresh_tokens()
|
||||||
|
|
|
@ -1,12 +1,94 @@
|
||||||
|
import os
|
||||||
from dataclasses import dataclass
|
from dataclasses import dataclass
|
||||||
from enum import Enum
|
from enum import Enum
|
||||||
from typing import Generator
|
from typing import Generator
|
||||||
|
|
||||||
|
import aiofiles
|
||||||
|
import mutagen.id3 as id3
|
||||||
from mutagen.flac import FLAC, Picture
|
from mutagen.flac import FLAC, Picture
|
||||||
from mutagen.id3 import APIC, ID3, ID3NoHeaderError
|
from mutagen.id3 import APIC, ID3, ID3NoHeaderError
|
||||||
from mutagen.mp4 import MP4, MP4Cover
|
from mutagen.mp4 import MP4, MP4Cover
|
||||||
|
|
||||||
from .metadata import TrackMetadata
|
from .metadata import Covers, TrackMetadata
|
||||||
|
|
||||||
|
FLAC_MAX_BLOCKSIZE = 16777215 # 16.7 MB
|
||||||
|
|
||||||
|
MP4_KEYS = (
|
||||||
|
"\xa9nam",
|
||||||
|
"\xa9ART",
|
||||||
|
"\xa9alb",
|
||||||
|
r"aART",
|
||||||
|
"\xa9day",
|
||||||
|
"\xa9day",
|
||||||
|
"\xa9cmt",
|
||||||
|
"desc",
|
||||||
|
"purd",
|
||||||
|
"\xa9grp",
|
||||||
|
"\xa9gen",
|
||||||
|
"\xa9lyr",
|
||||||
|
"\xa9too",
|
||||||
|
"cprt",
|
||||||
|
"cpil",
|
||||||
|
"covr",
|
||||||
|
"trkn",
|
||||||
|
"disk",
|
||||||
|
None,
|
||||||
|
None,
|
||||||
|
None,
|
||||||
|
)
|
||||||
|
|
||||||
|
MP3_KEYS = (
|
||||||
|
id3.TIT2,
|
||||||
|
id3.TPE1,
|
||||||
|
id3.TALB,
|
||||||
|
id3.TPE2,
|
||||||
|
id3.TCOM,
|
||||||
|
id3.TYER,
|
||||||
|
id3.COMM,
|
||||||
|
id3.TT1,
|
||||||
|
id3.TT1,
|
||||||
|
id3.GP1,
|
||||||
|
id3.TCON,
|
||||||
|
id3.USLT,
|
||||||
|
id3.TEN,
|
||||||
|
id3.TCOP,
|
||||||
|
id3.TCMP,
|
||||||
|
None,
|
||||||
|
id3.TRCK,
|
||||||
|
id3.TPOS,
|
||||||
|
None,
|
||||||
|
None,
|
||||||
|
None,
|
||||||
|
)
|
||||||
|
|
||||||
|
METADATA_TYPES = (
|
||||||
|
"title",
|
||||||
|
"artist",
|
||||||
|
"album",
|
||||||
|
"albumartist",
|
||||||
|
"composer",
|
||||||
|
"year",
|
||||||
|
"comment",
|
||||||
|
"description",
|
||||||
|
"purchase_date",
|
||||||
|
"grouping",
|
||||||
|
"genre",
|
||||||
|
"lyrics",
|
||||||
|
"encoder",
|
||||||
|
"copyright",
|
||||||
|
"compilation",
|
||||||
|
"cover",
|
||||||
|
"tracknumber",
|
||||||
|
"discnumber",
|
||||||
|
"tracktotal",
|
||||||
|
"disctotal",
|
||||||
|
"date",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
FLAC_KEY = {v: v.upper() for v in METADATA_TYPES}
|
||||||
|
MP4_KEY = dict(zip(METADATA_TYPES, MP4_KEYS))
|
||||||
|
MP3_KEY = dict(zip(METADATA_TYPES, MP3_KEYS))
|
||||||
|
|
||||||
|
|
||||||
class Container(Enum):
|
class Container(Enum):
|
||||||
|
@ -37,10 +119,9 @@ class Container(Enum):
|
||||||
# unreachable
|
# unreachable
|
||||||
yield
|
yield
|
||||||
|
|
||||||
|
|
||||||
def _tag_flac(self, meta):
|
def _tag_flac(self, meta):
|
||||||
for k, v in FLAC_KEY.items():
|
for k, v in FLAC_KEY.items():
|
||||||
tag = getattr(meta, k)
|
tag = self._attr_from_meta(meta, k)
|
||||||
if tag:
|
if tag:
|
||||||
if k in {
|
if k in {
|
||||||
"tracknumber",
|
"tracknumber",
|
||||||
|
@ -52,7 +133,6 @@ class Container(Enum):
|
||||||
|
|
||||||
yield (v, str(tag))
|
yield (v, str(tag))
|
||||||
|
|
||||||
|
|
||||||
def _tag_mp3(self, meta):
|
def _tag_mp3(self, meta):
|
||||||
for k, v in MP3_KEY.items():
|
for k, v in MP3_KEY.items():
|
||||||
if k == "tracknumber":
|
if k == "tracknumber":
|
||||||
|
@ -60,7 +140,7 @@ class Container(Enum):
|
||||||
elif k == "discnumber":
|
elif k == "discnumber":
|
||||||
text = f"{meta.discnumber}/{meta.disctotal}"
|
text = f"{meta.discnumber}/{meta.disctotal}"
|
||||||
else:
|
else:
|
||||||
text = getattr(self, k)
|
text = self._attr_from_meta(meta, k)
|
||||||
|
|
||||||
if text is not None and v is not None:
|
if text is not None and v is not None:
|
||||||
yield (v.__name__, v(encoding=3, text=text))
|
yield (v.__name__, v(encoding=3, text=text))
|
||||||
|
@ -72,17 +152,63 @@ class Container(Enum):
|
||||||
elif k == "discnumber":
|
elif k == "discnumber":
|
||||||
text = [(meta.discnumber, meta.disctotal)]
|
text = [(meta.discnumber, meta.disctotal)]
|
||||||
else:
|
else:
|
||||||
text = getattr(self, k)
|
text = self._attr_from_meta(meta, k)
|
||||||
|
|
||||||
if v is not None and text is not None:
|
if v is not None and text is not None:
|
||||||
yield (v, text)
|
yield (v, text)
|
||||||
|
|
||||||
|
def _attr_from_meta(self, meta: TrackMetadata, attr: str) -> str:
|
||||||
|
# TODO: verify this works
|
||||||
|
in_trackmetadata = {
|
||||||
|
"title",
|
||||||
|
"album",
|
||||||
|
"artist",
|
||||||
|
"tracknumber",
|
||||||
|
"discnumber",
|
||||||
|
"composer",
|
||||||
|
}
|
||||||
|
if attr in in_trackmetadata:
|
||||||
|
return str(getattr(meta, attr))
|
||||||
|
else:
|
||||||
|
return str(getattr(meta.album, attr))
|
||||||
|
|
||||||
@dataclass(slots=True)
|
def tag_audio(self, audio, tags):
|
||||||
class Tagger:
|
for k, v in tags:
|
||||||
meta: TrackMetadata
|
audio[k] = v
|
||||||
|
|
||||||
def tag(self, path: str, embed_cover: bool, covers: Cover):
|
async def embed_cover(self, audio, cover_path):
|
||||||
|
if self == Container.FLAC:
|
||||||
|
size = os.path.getsize(cover_path)
|
||||||
|
if size > FLAC_MAX_BLOCKSIZE:
|
||||||
|
raise Exception("Cover art too big for FLAC")
|
||||||
|
cover = Picture()
|
||||||
|
cover.type = 3
|
||||||
|
cover.mime = "image/jpeg"
|
||||||
|
async with aiofiles.open(cover_path, "rb") as img:
|
||||||
|
cover.data = await img.read()
|
||||||
|
audio.add_picture(cover)
|
||||||
|
elif self == Container.MP3:
|
||||||
|
cover = APIC()
|
||||||
|
cover.type = 3
|
||||||
|
cover.mime = "image/jpeg"
|
||||||
|
async with aiofiles.open(cover_path, "rb") as img:
|
||||||
|
cover.data = await img.read()
|
||||||
|
audio.add(cover)
|
||||||
|
elif self == Container.AAC:
|
||||||
|
async with aiofiles.open(cover_path, "rb") as img:
|
||||||
|
cover = MP4Cover(await img.read(), imageformat=MP4Cover.FORMAT_JPEG)
|
||||||
|
audio["covr"] = [cover]
|
||||||
|
|
||||||
|
def save_audio(self, audio, path):
|
||||||
|
if self == Container.FLAC:
|
||||||
|
audio.save()
|
||||||
|
elif self == Container.AAC:
|
||||||
|
audio.save()
|
||||||
|
elif self == Container.MP3:
|
||||||
|
audio.save(path, "v2_version=3")
|
||||||
|
|
||||||
|
|
||||||
|
async def tag_file(path: str, meta: TrackMetadata, cover_path: str | None):
|
||||||
ext = path.split(".")[-1].upper()
|
ext = path.split(".")[-1].upper()
|
||||||
if ext == "flac":
|
if ext == "flac":
|
||||||
container = Container.FLAC
|
container = Container.FLAC
|
||||||
|
@ -94,8 +220,8 @@ class Tagger:
|
||||||
raise Exception(f"Invalid extension {ext}")
|
raise Exception(f"Invalid extension {ext}")
|
||||||
|
|
||||||
audio = container.get_mutagen_class(path)
|
audio = container.get_mutagen_class(path)
|
||||||
tags = container.get_tag_pairs(self.meta)
|
tags = container.get_tag_pairs(meta)
|
||||||
for k, v in tags:
|
container.tag_audio(audio, tags)
|
||||||
audio[k] = v
|
if cover_path is not None:
|
||||||
|
await container.embed_cover(audio, cover_path)
|
||||||
c =
|
container.save_audio(audio, path)
|
||||||
|
|
|
@ -23,12 +23,12 @@ class TidalClient(Client):
|
||||||
self.logged_in = False
|
self.logged_in = False
|
||||||
self.global_config = config
|
self.global_config = config
|
||||||
self.config = config.session.tidal
|
self.config = config.session.tidal
|
||||||
self.session = self.get_session()
|
|
||||||
self.rate_limiter = self.get_rate_limiter(
|
self.rate_limiter = self.get_rate_limiter(
|
||||||
config.session.downloads.requests_per_minute
|
config.session.downloads.requests_per_minute
|
||||||
)
|
)
|
||||||
|
|
||||||
async def login(self):
|
async def login(self):
|
||||||
|
self.session = await self.get_session()
|
||||||
c = self.config
|
c = self.config
|
||||||
if not c.access_token:
|
if not c.access_token:
|
||||||
raise Exception("Access token not found in config.")
|
raise Exception("Access token not found in config.")
|
||||||
|
|
|
@ -1,13 +1,16 @@
|
||||||
|
import asyncio
|
||||||
import os
|
import os
|
||||||
from dataclasses import dataclass
|
from dataclasses import dataclass
|
||||||
|
|
||||||
from . import converter
|
from . import converter
|
||||||
|
from .artwork import downscale_image
|
||||||
from .client import Client
|
from .client import Client
|
||||||
from .config import Config
|
from .config import Config
|
||||||
from .downloadable import Downloadable
|
from .downloadable import BasicDownloadable, Downloadable
|
||||||
from .media import Media, Pending
|
from .media import Media, Pending
|
||||||
from .metadata import AlbumMetadata, TrackMetadata
|
from .metadata import AlbumMetadata, Covers, TrackMetadata
|
||||||
from .progress import get_progress_bar
|
from .progress import get_progress_bar
|
||||||
|
from .tagger import tag_file
|
||||||
|
|
||||||
|
|
||||||
@dataclass(slots=True)
|
@dataclass(slots=True)
|
||||||
|
@ -16,28 +19,33 @@ class Track(Media):
|
||||||
downloadable: Downloadable
|
downloadable: Downloadable
|
||||||
config: Config
|
config: Config
|
||||||
folder: str
|
folder: str
|
||||||
|
# Is None if a cover doesn't exist for the track
|
||||||
|
cover_path: str | None
|
||||||
|
# change?
|
||||||
download_path: str = ""
|
download_path: str = ""
|
||||||
|
|
||||||
async def preprocess(self):
|
async def preprocess(self):
|
||||||
folder = self._get_folder(self.folder)
|
self._set_download_path()
|
||||||
os.makedirs(folder, exist_ok=True)
|
os.makedirs(self.folder, exist_ok=True)
|
||||||
# Run in background while track downloads?
|
|
||||||
# Don't download again if part of album
|
|
||||||
await self._download_cover()
|
|
||||||
|
|
||||||
async def download(self):
|
async def download(self):
|
||||||
async with get_progress_bar(self.config, self.downloadable.size()) as bar:
|
# TODO: progress bar description
|
||||||
|
with get_progress_bar(
|
||||||
|
self.config,
|
||||||
|
await self.downloadable.size(),
|
||||||
|
f"Track {self.meta.tracknumber}",
|
||||||
|
) as bar:
|
||||||
await self.downloadable.download(
|
await self.downloadable.download(
|
||||||
self.download_path, lambda x: bar.update(x)
|
self.download_path, lambda x: bar.update(x)
|
||||||
)
|
)
|
||||||
|
|
||||||
async def postprocess(self):
|
async def postprocess(self):
|
||||||
await self._tag()
|
await self._tag()
|
||||||
|
if self.config.session.conversion.enabled:
|
||||||
await self._convert()
|
await self._convert()
|
||||||
|
|
||||||
async def _tag(self):
|
async def _tag(self):
|
||||||
t = Tagger(self.meta)
|
await tag_file(self.download_path, self.meta, self.cover_path)
|
||||||
t.tag(self.download_path)
|
|
||||||
|
|
||||||
async def _convert(self):
|
async def _convert(self):
|
||||||
CONV_CLASS = {
|
CONV_CLASS = {
|
||||||
|
@ -60,10 +68,10 @@ class Track(Media):
|
||||||
engine.convert()
|
engine.convert()
|
||||||
self.download_path = engine.final_fn # because the extension changed
|
self.download_path = engine.final_fn # because the extension changed
|
||||||
|
|
||||||
def _get_folder(self, parent: str) -> str:
|
def _set_download_path(self):
|
||||||
formatter = self.config.session.filepaths.track_format
|
formatter = self.config.session.filepaths.track_format
|
||||||
track_path = self.meta.format_track_path(formatter)
|
track_path = self.meta.format_track_path(formatter)
|
||||||
return os.path.join(self.folder, track_path)
|
self.download_path = os.path.join(self.folder, track_path)
|
||||||
|
|
||||||
|
|
||||||
@dataclass(slots=True)
|
@dataclass(slots=True)
|
||||||
|
@ -73,6 +81,7 @@ class PendingTrack(Pending):
|
||||||
client: Client
|
client: Client
|
||||||
config: Config
|
config: Config
|
||||||
folder: str
|
folder: str
|
||||||
|
cover_path: str
|
||||||
|
|
||||||
async def resolve(self) -> Track:
|
async def resolve(self) -> Track:
|
||||||
resp = await self.client.get_metadata({"id": self.id}, "track")
|
resp = await self.client.get_metadata({"id": self.id}, "track")
|
||||||
|
@ -80,4 +89,87 @@ class PendingTrack(Pending):
|
||||||
quality = getattr(self.config.session, self.client.source).quality
|
quality = getattr(self.config.session, self.client.source).quality
|
||||||
assert isinstance(quality, int)
|
assert isinstance(quality, int)
|
||||||
downloadable = await self.client.get_downloadable(self.id, quality)
|
downloadable = await self.client.get_downloadable(self.id, quality)
|
||||||
return Track(meta, downloadable, self.config, self.folder)
|
return Track(meta, downloadable, self.config, self.folder, self.cover_path)
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(slots=True)
|
||||||
|
class PendingSingle(Pending):
|
||||||
|
"""Whereas PendingTrack is used in the context of an album, where the album metadata
|
||||||
|
and cover have been resolved, PendingSingle is used when a single track is downloaded.
|
||||||
|
|
||||||
|
This resolves the Album metadata and downloads the cover to pass to the Track class.
|
||||||
|
"""
|
||||||
|
|
||||||
|
id: str
|
||||||
|
client: Client
|
||||||
|
config: Config
|
||||||
|
|
||||||
|
async def resolve(self) -> Track:
|
||||||
|
resp = await self.client.get_metadata({"id": self.id}, "track")
|
||||||
|
album = AlbumMetadata.from_resp(resp["album"], self.client.source)
|
||||||
|
meta = TrackMetadata.from_resp(album, self.client.source, resp)
|
||||||
|
|
||||||
|
quality = getattr(self.config.session, self.client.source).quality
|
||||||
|
assert isinstance(quality, int)
|
||||||
|
folder = self._format_folder(album)
|
||||||
|
os.makedirs(folder, exist_ok=True)
|
||||||
|
|
||||||
|
embedded_cover_path, downloadable = await asyncio.gather(
|
||||||
|
self._download_cover(album.covers, folder),
|
||||||
|
self.client.get_downloadable(self.id, quality),
|
||||||
|
)
|
||||||
|
return Track(meta, downloadable, self.config, folder, embedded_cover_path)
|
||||||
|
|
||||||
|
def _format_folder(self, meta: AlbumMetadata) -> str:
|
||||||
|
c = self.config.session
|
||||||
|
parent = c.downloads.folder
|
||||||
|
formatter = c.filepaths.folder_format
|
||||||
|
return os.path.join(parent, meta.format_folder_path(formatter))
|
||||||
|
|
||||||
|
async def _download_cover(self, covers: Covers, folder: str) -> str | None:
|
||||||
|
"""Download artwork, which may include a seperate file to keep.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
covers (Covers): The set of available covers.
|
||||||
|
|
||||||
|
"""
|
||||||
|
c = self.config.session.artwork
|
||||||
|
if not c.save_artwork and not c.embed:
|
||||||
|
# No need to download anything
|
||||||
|
return None
|
||||||
|
|
||||||
|
session = self.client.session
|
||||||
|
downloadables = []
|
||||||
|
|
||||||
|
hires_cover_path = None
|
||||||
|
if c.save_artwork:
|
||||||
|
l_url, _ = covers.largest()
|
||||||
|
assert l_url is not None
|
||||||
|
hires_cover_path = os.path.join(folder, "cover.jpg")
|
||||||
|
downloadables.append(
|
||||||
|
BasicDownloadable(session, l_url, "jpg").download(
|
||||||
|
hires_cover_path, lambda _: None
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
embed_cover_path = None
|
||||||
|
if c.embed:
|
||||||
|
embed_url, _ = covers.get_size(c.embed_size)
|
||||||
|
assert embed_url is not None
|
||||||
|
embed_cover_path = os.path.join(folder, "embed_cover.jpg")
|
||||||
|
downloadables.append(
|
||||||
|
BasicDownloadable(session, embed_url, "jpg").download(
|
||||||
|
embed_cover_path, lambda _: None
|
||||||
|
)
|
||||||
|
)
|
||||||
|
await asyncio.gather(*downloadables)
|
||||||
|
|
||||||
|
if c.embed and c.embed_max_width > 0:
|
||||||
|
assert embed_cover_path is not None
|
||||||
|
downscale_image(embed_cover_path, c.embed_max_width)
|
||||||
|
|
||||||
|
if c.save_artwork and c.saved_max_width > 0:
|
||||||
|
assert hires_cover_path is not None
|
||||||
|
downscale_image(hires_cover_path, c.saved_max_width)
|
||||||
|
|
||||||
|
return embed_cover_path
|
||||||
|
|
|
@ -130,13 +130,13 @@ def test_sample_config_data_fields(sample_config_data):
|
||||||
assert sample_config_data.conversion == test_config.conversion
|
assert sample_config_data.conversion == test_config.conversion
|
||||||
|
|
||||||
|
|
||||||
def test_config_save_file_called_on_del(sample_config, mocker):
|
# def test_config_save_file_called_on_del(sample_config, mocker):
|
||||||
sample_config.file.set_modified()
|
# sample_config.file.set_modified()
|
||||||
mockf = mocker.Mock()
|
# mockf = mocker.Mock()
|
||||||
|
#
|
||||||
sample_config.save_file = mockf
|
# sample_config.save_file = mockf
|
||||||
sample_config.__del__()
|
# sample_config.__del__()
|
||||||
mockf.assert_called_once()
|
# mockf.assert_called_once()
|
||||||
|
|
||||||
|
|
||||||
def test_config_update_on_save():
|
def test_config_update_on_save():
|
||||||
|
@ -152,17 +152,17 @@ def test_config_update_on_save():
|
||||||
assert conf2.session.downloads.folder == "new_folder"
|
assert conf2.session.downloads.folder == "new_folder"
|
||||||
|
|
||||||
|
|
||||||
def test_config_update_on_del():
|
# def test_config_update_on_del():
|
||||||
tmp_config_path = "tests/config2.toml"
|
# tmp_config_path = "tests/config2.toml"
|
||||||
shutil.copy(SAMPLE_CONFIG, tmp_config_path)
|
# shutil.copy(SAMPLE_CONFIG, tmp_config_path)
|
||||||
conf = Config(tmp_config_path)
|
# conf = Config(tmp_config_path)
|
||||||
conf.file.downloads.folder = "new_folder"
|
# conf.file.downloads.folder = "new_folder"
|
||||||
conf.file.set_modified()
|
# conf.file.set_modified()
|
||||||
del conf
|
# del conf
|
||||||
conf2 = Config(tmp_config_path)
|
# conf2 = Config(tmp_config_path)
|
||||||
os.remove(tmp_config_path)
|
# os.remove(tmp_config_path)
|
||||||
|
#
|
||||||
assert conf2.session.downloads.folder == "new_folder"
|
# assert conf2.session.downloads.folder == "new_folder"
|
||||||
|
|
||||||
|
|
||||||
def test_config_dont_update_without_set_modified():
|
def test_config_dont_update_without_set_modified():
|
||||||
|
|
70
tests/test_qobuz_client.py
Normal file
70
tests/test_qobuz_client.py
Normal file
|
@ -0,0 +1,70 @@
|
||||||
|
import asyncio
|
||||||
|
import hashlib
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from util import afor, arun
|
||||||
|
|
||||||
|
from streamrip.config import Config
|
||||||
|
from streamrip.downloadable import BasicDownloadable
|
||||||
|
from streamrip.exceptions import MissingCredentials
|
||||||
|
from streamrip.qobuz_client import QobuzClient
|
||||||
|
|
||||||
|
logger = logging.getLogger("streamrip")
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def config():
|
||||||
|
c = Config.defaults()
|
||||||
|
c.session.qobuz.email_or_userid = os.environ["QOBUZ_EMAIL"]
|
||||||
|
c.session.qobuz.password_or_token = hashlib.md5(
|
||||||
|
os.environ["QOBUZ_PASSWORD"].encode("utf-8")
|
||||||
|
).hexdigest()
|
||||||
|
return c
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def client(config):
|
||||||
|
c = QobuzClient(config) # type: ignore
|
||||||
|
arun(c.login())
|
||||||
|
return c
|
||||||
|
|
||||||
|
|
||||||
|
def test_client_raises_missing_credentials():
|
||||||
|
c = Config.defaults()
|
||||||
|
with pytest.raises(MissingCredentials):
|
||||||
|
arun(QobuzClient(c).login())
|
||||||
|
|
||||||
|
|
||||||
|
def test_client_get_metadata(client):
|
||||||
|
meta = arun(client.get_metadata("lzpf67e8f4h1a", "album"))
|
||||||
|
assert meta["title"] == "I Killed Your Dog"
|
||||||
|
assert len(meta["tracks"]["items"]) == 16
|
||||||
|
assert meta["maximum_bit_depth"] == 24
|
||||||
|
|
||||||
|
|
||||||
|
def test_client_get_downloadable(client):
|
||||||
|
d = arun(client.get_downloadable("19512574", 3))
|
||||||
|
assert isinstance(d, BasicDownloadable)
|
||||||
|
assert d.extension == "flac"
|
||||||
|
assert isinstance(d.url, str)
|
||||||
|
assert "https://" in d.url
|
||||||
|
|
||||||
|
|
||||||
|
def test_client_search_limit(client):
|
||||||
|
res = client.search("rumours", "album", limit=5)
|
||||||
|
total = 0
|
||||||
|
for r in afor(res):
|
||||||
|
total += len(r["albums"]["items"])
|
||||||
|
assert total == 5
|
||||||
|
|
||||||
|
|
||||||
|
def test_client_search_no_limit(client):
|
||||||
|
res = client.search("rumours", "album", limit=None)
|
||||||
|
correct_total = 0
|
||||||
|
total = 0
|
||||||
|
for r in afor(res):
|
||||||
|
total += len(r["albums"]["items"])
|
||||||
|
correct_total = max(correct_total, r["albums"]["total"])
|
||||||
|
assert total == correct_total
|
Loading…
Add table
Add a link
Reference in a new issue