mirror of
https://github.com/nathom/streamrip.git
synced 2025-05-09 14:11:55 -04:00
Restructure and add tests
This commit is contained in:
parent
4448220085
commit
06335058f3
42 changed files with 718 additions and 3565 deletions
34
poetry.lock
generated
34
poetry.lock
generated
|
@ -203,7 +203,7 @@ files = [
|
|||
name = "atomicwrites"
|
||||
version = "1.4.1"
|
||||
description = "Atomic file writes."
|
||||
category = "dev"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
|
||||
files = [
|
||||
|
@ -718,7 +718,7 @@ testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs
|
|||
name = "iniconfig"
|
||||
version = "2.0.0"
|
||||
description = "brain-dead simple config-ini parsing"
|
||||
category = "dev"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
|
@ -983,7 +983,7 @@ files = [
|
|||
name = "packaging"
|
||||
version = "23.1"
|
||||
description = "Core utilities for Python packages"
|
||||
category = "dev"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
|
@ -1133,7 +1133,7 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co
|
|||
name = "pluggy"
|
||||
version = "1.3.0"
|
||||
description = "plugin and hook calling mechanisms for python"
|
||||
category = "dev"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
|
@ -1149,7 +1149,7 @@ testing = ["pytest", "pytest-benchmark"]
|
|||
name = "py"
|
||||
version = "1.11.0"
|
||||
description = "library with cross-python path, ini-parsing, io, code, log facilities"
|
||||
category = "dev"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
|
||||
files = [
|
||||
|
@ -1322,7 +1322,7 @@ plugins = ["importlib-metadata"]
|
|||
name = "pytest"
|
||||
version = "6.2.5"
|
||||
description = "pytest: simple powerful testing with Python"
|
||||
category = "dev"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
files = [
|
||||
|
@ -1343,6 +1343,24 @@ toml = "*"
|
|||
[package.extras]
|
||||
testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"]
|
||||
|
||||
[[package]]
|
||||
name = "pytest-mock"
|
||||
version = "3.11.1"
|
||||
description = "Thin-wrapper around the mock package for easier use with pytest"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "pytest-mock-3.11.1.tar.gz", hash = "sha256:7f6b125602ac6d743e523ae0bfa71e1a697a2f5534064528c6ff84c2f7c2fc7f"},
|
||||
{file = "pytest_mock-3.11.1-py3-none-any.whl", hash = "sha256:21c279fff83d70763b05f8874cc9cfb3fcacd6d354247a976f9529d19f9acf39"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
pytest = ">=5.0"
|
||||
|
||||
[package.extras]
|
||||
dev = ["pre-commit", "pytest-asyncio", "tox"]
|
||||
|
||||
[[package]]
|
||||
name = "pytz"
|
||||
version = "2023.3.post1"
|
||||
|
@ -1670,7 +1688,7 @@ test = ["pytest"]
|
|||
name = "toml"
|
||||
version = "0.10.2"
|
||||
description = "Python Library for Tom's Obvious, Minimal Language"
|
||||
category = "dev"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
|
||||
files = [
|
||||
|
@ -1959,4 +1977,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p
|
|||
[metadata]
|
||||
lock-version = "2.0"
|
||||
python-versions = ">=3.8 <4.0"
|
||||
content-hash = "1cf354944aafbff9fe2ac4bd14695367cb3eb289c8177c55612bc9187dbebbab"
|
||||
content-hash = "e259b509756397e94af7de03b42c5f6e1e62ad24d133daba282fbe095e244824"
|
||||
|
|
|
@ -7,15 +7,18 @@ license = "GPL-3.0-only"
|
|||
readme = "README.md"
|
||||
homepage = "https://github.com/nathom/streamrip"
|
||||
repository = "https://github.com/nathom/streamrip"
|
||||
include = ["streamrip/config.toml"]
|
||||
include = ["src/config.toml"]
|
||||
keywords = ["hi-res", "free", "music", "download"]
|
||||
classifiers = [
|
||||
"License :: OSI Approved :: GNU General Public License (GPL)",
|
||||
"Operating System :: OS Independent",
|
||||
]
|
||||
packages = [
|
||||
{ include = "streamrip" }
|
||||
]
|
||||
|
||||
[tool.poetry.scripts]
|
||||
rip = "src.cli:main"
|
||||
rip = "streamrip.cli:main"
|
||||
|
||||
[tool.poetry.dependencies]
|
||||
python = ">=3.8 <4.0"
|
||||
|
@ -36,6 +39,7 @@ aiofiles = "^0.7"
|
|||
aiohttp = "^3.7"
|
||||
aiodns = "^3.0.0"
|
||||
aiolimiter = "^1.1.0"
|
||||
pytest-mock = "^3.11.1"
|
||||
|
||||
[tool.poetry.urls]
|
||||
"Bug Reports" = "https://github.com/nathom/streamrip/issues"
|
||||
|
|
|
@ -1 +0,0 @@
|
|||
"""Rip: an easy to use command line utility for downloading audio streams."""
|
|
@ -1,4 +0,0 @@
|
|||
"""Run the rip program."""
|
||||
from .cli import main
|
||||
|
||||
main()
|
|
@ -1,5 +0,0 @@
|
|||
"""Exceptions used by RipCore."""
|
||||
|
||||
|
||||
class DeezloaderFallback(Exception):
|
||||
"""Raise if Deezer account isn't logged in and rip is falling back to Deezloader."""
|
49
rip/utils.py
49
rip/utils.py
|
@ -1,49 +0,0 @@
|
|||
"""Utility functions for RipCore."""
|
||||
|
||||
import re
|
||||
from typing import Tuple
|
||||
|
||||
from streamrip.constants import AGENT
|
||||
from streamrip.utils import gen_threadsafe_session
|
||||
|
||||
interpreter_artist_regex = re.compile(r"getSimilarArtist\(\s*'(\w+)'")
|
||||
|
||||
|
||||
def extract_interpreter_url(url: str) -> str:
|
||||
"""Extract artist ID from a Qobuz interpreter url.
|
||||
|
||||
:param url: Urls of the form "https://www.qobuz.com/us-en/interpreter/{artist}/download-streaming-albums"
|
||||
:type url: str
|
||||
:rtype: str
|
||||
"""
|
||||
session = gen_threadsafe_session({"User-Agent": AGENT})
|
||||
r = session.get(url)
|
||||
match = interpreter_artist_regex.search(r.text)
|
||||
if match:
|
||||
return match.group(1)
|
||||
|
||||
raise Exception(
|
||||
"Unable to extract artist id from interpreter url. Use a "
|
||||
"url that contains an artist id."
|
||||
)
|
||||
|
||||
|
||||
deezer_id_link_regex = re.compile(
|
||||
r"https://www\.deezer\.com/[a-z]{2}/(album|artist|playlist|track)/(\d+)"
|
||||
)
|
||||
|
||||
|
||||
def extract_deezer_dynamic_link(url: str) -> Tuple[str, str]:
|
||||
"""Extract a deezer url that includes an ID from a deezer.page.link url.
|
||||
|
||||
:param url:
|
||||
:type url: str
|
||||
:rtype: Tuple[str, str]
|
||||
"""
|
||||
session = gen_threadsafe_session({"User-Agent": AGENT})
|
||||
r = session.get(url)
|
||||
match = deezer_id_link_regex.search(r.text)
|
||||
if match:
|
||||
return match.group(1), match.group(2)
|
||||
|
||||
raise Exception("Unable to extract Deezer dynamic link.")
|
32
src/media.py
32
src/media.py
|
@ -1,32 +0,0 @@
|
|||
from abc import ABC, abstractmethod
|
||||
|
||||
|
||||
class Media(ABC):
|
||||
async def rip(self):
|
||||
await self.preprocess()
|
||||
await self.download()
|
||||
await self.postprocess()
|
||||
|
||||
@abstractmethod
|
||||
async def preprocess(self):
|
||||
"""Create directories, download cover art, etc."""
|
||||
raise NotImplemented
|
||||
|
||||
@abstractmethod
|
||||
async def download(self):
|
||||
"""Download and tag the actual audio files in the correct directories."""
|
||||
raise NotImplemented
|
||||
|
||||
@abstractmethod
|
||||
async def postprocess(self):
|
||||
"""Update database, run conversion, delete garbage files etc."""
|
||||
raise NotImplemented
|
||||
|
||||
|
||||
class Pending(ABC):
|
||||
"""A request to download a `Media` whose metadata has not been fetched."""
|
||||
|
||||
@abstractmethod
|
||||
async def resolve(self) -> Media:
|
||||
"""Fetch metadata and resolve into a downloadable `Media` object."""
|
||||
raise NotImplemented
|
49
src/track.py
49
src/track.py
|
@ -1,49 +0,0 @@
|
|||
import os
|
||||
from dataclasses import dataclass
|
||||
|
||||
from .client import Client
|
||||
from .config import Config
|
||||
from .downloadable import Downloadable
|
||||
from .media import Media, Pending
|
||||
from .metadata import AlbumMetadata, TrackMetadata
|
||||
|
||||
|
||||
@dataclass(slots=True)
|
||||
class Track(Media):
|
||||
meta: TrackMetadata
|
||||
downloadable: Downloadable
|
||||
config: Config
|
||||
folder: str
|
||||
download_path: str = ""
|
||||
|
||||
async def preprocess(self):
|
||||
folder = self._get_folder(self.folder)
|
||||
os.makedirs(folder, exist_ok=True)
|
||||
# Run in background while track downloads?
|
||||
# Don't download again if part of album
|
||||
await self._download_cover()
|
||||
|
||||
async def download(self):
|
||||
async with get_progress_bar(self.config, self.downloadable.size()) as bar:
|
||||
self.downloadable.download(self.download_path, lambda x: bar.update(x))
|
||||
|
||||
async def postprocess(self):
|
||||
await self.tag()
|
||||
await self.convert()
|
||||
|
||||
|
||||
@dataclass(slots=True)
|
||||
class PendingTrack(Pending):
|
||||
id: str
|
||||
album: AlbumMetadata
|
||||
client: Client
|
||||
config: Config
|
||||
folder: str
|
||||
|
||||
async def resolve(self) -> Track:
|
||||
resp = await self.client.get_metadata({"id": self.id}, "track")
|
||||
meta = TrackMetadata.from_resp(self.album, self.client.source, resp)
|
||||
quality = getattr(self.config.session, self.client.source).quality
|
||||
assert isinstance(quality, int)
|
||||
downloadable = await self.client.get_downloadable(self.id, quality)
|
||||
return Track(meta, downloadable, self.config, self.directory)
|
|
@ -1,5 +1 @@
|
|||
"""streamrip: the all in one music downloader."""
|
||||
|
||||
__version__ = "1.9.7"
|
||||
|
||||
from . import clients, constants, converter, downloadtools, media
|
||||
__all__ = ["config"]
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
import copy
|
||||
import logging
|
||||
import os
|
||||
from dataclasses import dataclass
|
||||
from dataclasses import dataclass, fields
|
||||
|
||||
from tomlkit.api import dumps, parse
|
||||
from tomlkit.toml_document import TOMLDocument
|
||||
|
@ -217,10 +217,11 @@ class ConfigData:
|
|||
filepaths: FilepathsConfig
|
||||
artwork: ArtworkConfig
|
||||
metadata: MetadataConfig
|
||||
qobuz_filter: QobuzDiscographyFilterConfig
|
||||
qobuz_filters: QobuzDiscographyFilterConfig
|
||||
|
||||
theme: ThemeConfig
|
||||
database: DatabaseConfig
|
||||
conversion: ConversionConfig
|
||||
|
||||
_modified: bool = False
|
||||
|
||||
|
@ -241,9 +242,10 @@ class ConfigData:
|
|||
artwork = ArtworkConfig(**toml["artwork"]) # type: ignore
|
||||
filepaths = FilepathsConfig(**toml["filepaths"]) # type: ignore
|
||||
metadata = MetadataConfig(**toml["metadata"]) # type: ignore
|
||||
qobuz_filter = QobuzDiscographyFilterConfig(**toml["qobuz_filters"]) # type: ignore
|
||||
qobuz_filters = QobuzDiscographyFilterConfig(**toml["qobuz_filters"]) # type: ignore
|
||||
theme = ThemeConfig(**toml["theme"]) # type: ignore
|
||||
database = DatabaseConfig(**toml["database"]) # type: ignore
|
||||
conversion = ConversionConfig(**toml["conversion"]) # type: ignore
|
||||
|
||||
return cls(
|
||||
toml=toml,
|
||||
|
@ -257,9 +259,10 @@ class ConfigData:
|
|||
artwork=artwork,
|
||||
filepaths=filepaths,
|
||||
metadata=metadata,
|
||||
qobuz_filter=qobuz_filter,
|
||||
qobuz_filters=qobuz_filters,
|
||||
theme=theme,
|
||||
database=database,
|
||||
conversion=conversion,
|
||||
)
|
||||
|
||||
@classmethod
|
||||
|
@ -275,7 +278,25 @@ class ConfigData:
|
|||
return self._modified
|
||||
|
||||
def update_toml(self):
|
||||
pass
|
||||
update_toml_section_from_config(self.toml["downloads"], self.downloads)
|
||||
update_toml_section_from_config(self.toml["qobuz"], self.qobuz)
|
||||
update_toml_section_from_config(self.toml["tidal"], self.tidal)
|
||||
update_toml_section_from_config(self.toml["deezer"], self.deezer)
|
||||
update_toml_section_from_config(self.toml["soundcloud"], self.soundcloud)
|
||||
update_toml_section_from_config(self.toml["youtube"], self.youtube)
|
||||
update_toml_section_from_config(self.toml["lastfm"], self.lastfm)
|
||||
update_toml_section_from_config(self.toml["artwork"], self.artwork)
|
||||
update_toml_section_from_config(self.toml["filepaths"], self.filepaths)
|
||||
update_toml_section_from_config(self.toml["metadata"], self.metadata)
|
||||
update_toml_section_from_config(self.toml["qobuz_filters"], self.qobuz_filters)
|
||||
update_toml_section_from_config(self.toml["theme"], self.theme)
|
||||
update_toml_section_from_config(self.toml["database"], self.database)
|
||||
update_toml_section_from_config(self.toml["conversion"], self.conversion)
|
||||
|
||||
|
||||
def update_toml_section_from_config(toml_section, config):
|
||||
for field in fields(config):
|
||||
toml_section[field.name] = getattr(config, field.name)
|
||||
|
||||
|
||||
class Config:
|
||||
|
@ -294,3 +315,6 @@ class Config:
|
|||
with open(self._path, "w") as toml_file:
|
||||
self.file.update_toml()
|
||||
toml_file.write(dumps(self.file.toml))
|
||||
|
||||
def __del__(self):
|
||||
self.save_file()
|
|
@ -1,195 +0,0 @@
|
|||
"""Constants that are kept in one place."""
|
||||
|
||||
import base64
|
||||
|
||||
import mutagen.id3 as id3
|
||||
|
||||
AGENT = "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:83.0) Gecko/20100101 Firefox/83.0"
|
||||
|
||||
TIDAL_COVER_URL = "https://resources.tidal.com/images/{uuid}/{width}x{height}.jpg"
|
||||
# Get this from (base64encoded)
|
||||
# aHR0cHM6Ly9hLXYyLnNuZGNkbi5jb20vYXNzZXRzLzItYWIxYjg1NjguanM=
|
||||
# Don't know if this is a static url yet
|
||||
SOUNDCLOUD_CLIENT_ID = "qHsjZaNbdTcABbiIQnVfW07cEPGLNjIh"
|
||||
SOUNDCLOUD_USER_ID = "672320-86895-162383-801513"
|
||||
SOUNDCLOUD_APP_VERSION = "1630917744"
|
||||
|
||||
|
||||
QUALITY_DESC = {
|
||||
0: "128kbps",
|
||||
1: "320kbps",
|
||||
2: "16bit/44.1kHz",
|
||||
3: "24bit/96kHz",
|
||||
4: "24bit/192kHz",
|
||||
}
|
||||
|
||||
QOBUZ_FEATURED_KEYS = (
|
||||
"most-streamed",
|
||||
"recent-releases",
|
||||
"best-sellers",
|
||||
"press-awards",
|
||||
"ideal-discography",
|
||||
"editor-picks",
|
||||
"most-featured",
|
||||
"qobuzissims",
|
||||
"new-releases",
|
||||
"new-releases-full",
|
||||
"harmonia-mundi",
|
||||
"universal-classic",
|
||||
"universal-jazz",
|
||||
"universal-jeunesse",
|
||||
"universal-chanson",
|
||||
)
|
||||
|
||||
__MP4_KEYS = (
|
||||
"\xa9nam",
|
||||
"\xa9ART",
|
||||
"\xa9alb",
|
||||
r"aART",
|
||||
"\xa9day",
|
||||
"\xa9day",
|
||||
"\xa9cmt",
|
||||
"desc",
|
||||
"purd",
|
||||
"\xa9grp",
|
||||
"\xa9gen",
|
||||
"\xa9lyr",
|
||||
"\xa9too",
|
||||
"cprt",
|
||||
"cpil",
|
||||
"covr",
|
||||
"trkn",
|
||||
"disk",
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
)
|
||||
|
||||
__MP3_KEYS = (
|
||||
id3.TIT2,
|
||||
id3.TPE1,
|
||||
id3.TALB,
|
||||
id3.TPE2,
|
||||
id3.TCOM,
|
||||
id3.TYER,
|
||||
id3.COMM,
|
||||
id3.TT1,
|
||||
id3.TT1,
|
||||
id3.GP1,
|
||||
id3.TCON,
|
||||
id3.USLT,
|
||||
id3.TEN,
|
||||
id3.TCOP,
|
||||
id3.TCMP,
|
||||
None,
|
||||
id3.TRCK,
|
||||
id3.TPOS,
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
)
|
||||
|
||||
__METADATA_TYPES = (
|
||||
"title",
|
||||
"artist",
|
||||
"album",
|
||||
"albumartist",
|
||||
"composer",
|
||||
"year",
|
||||
"comment",
|
||||
"description",
|
||||
"purchase_date",
|
||||
"grouping",
|
||||
"genre",
|
||||
"lyrics",
|
||||
"encoder",
|
||||
"copyright",
|
||||
"compilation",
|
||||
"cover",
|
||||
"tracknumber",
|
||||
"discnumber",
|
||||
"tracktotal",
|
||||
"disctotal",
|
||||
"date",
|
||||
)
|
||||
|
||||
|
||||
FLAC_KEY = {v: v.upper() for v in __METADATA_TYPES}
|
||||
MP4_KEY = dict(zip(__METADATA_TYPES, __MP4_KEYS))
|
||||
MP3_KEY = dict(zip(__METADATA_TYPES, __MP3_KEYS))
|
||||
|
||||
COPYRIGHT = "\u2117"
|
||||
PHON_COPYRIGHT = "\u00a9"
|
||||
FLAC_MAX_BLOCKSIZE = 16777215 # 16.7 MB
|
||||
|
||||
# TODO: give these more descriptive names
|
||||
TRACK_KEYS = (
|
||||
"tracknumber",
|
||||
"artist",
|
||||
"albumartist",
|
||||
"composer",
|
||||
"title",
|
||||
"albumcomposer",
|
||||
"explicit",
|
||||
)
|
||||
ALBUM_KEYS = (
|
||||
"albumartist",
|
||||
"title",
|
||||
"year",
|
||||
"bit_depth",
|
||||
"sampling_rate",
|
||||
"container",
|
||||
"albumcomposer",
|
||||
"id",
|
||||
)
|
||||
# TODO: rename these to DEFAULT_FOLDER_FORMAT etc
|
||||
FOLDER_FORMAT = (
|
||||
"{albumartist} - {title} ({year}) [{container}] [{bit_depth}B-{sampling_rate}kHz]"
|
||||
)
|
||||
TRACK_FORMAT = "{tracknumber}. {artist} - {title}"
|
||||
|
||||
|
||||
TIDAL_MAX_Q = 7
|
||||
|
||||
TIDAL_Q_MAP = {
|
||||
"LOW": 0,
|
||||
"HIGH": 1,
|
||||
"LOSSLESS": 2,
|
||||
"HI_RES": 3,
|
||||
}
|
||||
|
||||
DEEZER_MAX_Q = 6
|
||||
DEEZER_FEATURED_KEYS = {"releases", "charts", "selection"}
|
||||
AVAILABLE_QUALITY_IDS = (0, 1, 2, 3, 4)
|
||||
DEEZER_FORMATS = {
|
||||
"AAC_64",
|
||||
"MP3_64",
|
||||
"MP3_128",
|
||||
"MP3_256",
|
||||
"MP3_320",
|
||||
"FLAC",
|
||||
}
|
||||
# video only for tidal
|
||||
MEDIA_TYPES = {"track", "album", "artist", "label", "playlist", "video"}
|
||||
|
||||
# used to homogenize cover size keys
|
||||
COVER_SIZES = ("thumbnail", "small", "large", "original")
|
||||
|
||||
TIDAL_CLIENT_INFO = {
|
||||
"id": base64.b64decode("elU0WEhWVmtjMnREUG80dA==").decode("iso-8859-1"),
|
||||
"secret": base64.b64decode(
|
||||
"VkpLaERGcUpQcXZzUFZOQlY2dWtYVEptd2x2YnR0UDd3bE1scmM3MnNlND0="
|
||||
).decode("iso-8859-1"),
|
||||
}
|
||||
|
||||
QOBUZ_BASE = "https://www.qobuz.com/api.json/0.2"
|
||||
|
||||
TIDAL_BASE = "https://api.tidalhifi.com/v1"
|
||||
TIDAL_AUTH_URL = "https://auth.tidal.com/v1/oauth2"
|
||||
|
||||
DEEZER_BASE = "https://api.deezer.com"
|
||||
DEEZER_DL = "http://dz.loaderapp.info/deezer"
|
||||
|
||||
SOUNDCLOUD_BASE = "https://api-v2.soundcloud.com"
|
||||
|
||||
MAX_FILES_OPEN = 128
|
|
@ -1,225 +0,0 @@
|
|||
import asyncio
|
||||
import functools
|
||||
import hashlib
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
from tempfile import gettempdir
|
||||
from typing import Callable, Dict, Iterable, List, Optional
|
||||
|
||||
import aiofiles
|
||||
import aiohttp
|
||||
from Cryptodome.Cipher import Blowfish
|
||||
|
||||
from .exceptions import NonStreamable
|
||||
from .utils import gen_threadsafe_session
|
||||
|
||||
logger = logging.getLogger("streamrip")
|
||||
|
||||
|
||||
class DownloadStream:
|
||||
"""An iterator over chunks of a stream.
|
||||
|
||||
Usage:
|
||||
|
||||
>>> stream = DownloadStream('https://google.com', None)
|
||||
>>> with open('google.html', 'wb') as file:
|
||||
>>> for chunk in stream:
|
||||
>>> file.write(chunk)
|
||||
|
||||
"""
|
||||
|
||||
is_encrypted = re.compile("/m(?:obile|edia)/")
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
url: str,
|
||||
source: str = None,
|
||||
params: dict = None,
|
||||
headers: dict = None,
|
||||
item_id: str = None,
|
||||
):
|
||||
"""Create an iterable DownloadStream of a URL.
|
||||
|
||||
:param url: The url to download
|
||||
:type url: str
|
||||
:param source: Only applicable for Deezer
|
||||
:type source: str
|
||||
:param params: Parameters to pass in the request
|
||||
:type params: dict
|
||||
:param headers: Headers to pass in the request
|
||||
:type headers: dict
|
||||
:param item_id: (Only for Deezer) the ID of the track
|
||||
:type item_id: str
|
||||
"""
|
||||
self.source = source
|
||||
self.session = gen_threadsafe_session(headers=headers)
|
||||
|
||||
self.id = item_id
|
||||
if isinstance(self.id, int):
|
||||
self.id = str(self.id)
|
||||
|
||||
if params is None:
|
||||
params = {}
|
||||
|
||||
self.request = self.session.get(
|
||||
url, allow_redirects=True, stream=True, params=params
|
||||
)
|
||||
self.file_size = int(self.request.headers.get("Content-Length", 0))
|
||||
|
||||
if self.file_size < 20000 and not self.url.endswith(".jpg"):
|
||||
import json
|
||||
|
||||
try:
|
||||
info = self.request.json()
|
||||
try:
|
||||
# Usually happens with deezloader downloads
|
||||
raise NonStreamable(f"{info['error']} - {info['message']}")
|
||||
except KeyError:
|
||||
raise NonStreamable(info)
|
||||
|
||||
except json.JSONDecodeError:
|
||||
raise NonStreamable("File not found.")
|
||||
|
||||
def __iter__(self) -> Iterable:
|
||||
"""Iterate through chunks of the stream.
|
||||
|
||||
:rtype: Iterable
|
||||
"""
|
||||
if self.source == "deezer" and self.is_encrypted.search(self.url) is not None:
|
||||
assert isinstance(self.id, str), self.id
|
||||
|
||||
blowfish_key = self._generate_blowfish_key(self.id)
|
||||
# decryptor = self._create_deezer_decryptor(blowfish_key)
|
||||
CHUNK_SIZE = 2048 * 3
|
||||
return (
|
||||
# (decryptor.decrypt(chunk[:2048]) + chunk[2048:])
|
||||
(self._decrypt_chunk(blowfish_key, chunk[:2048]) + chunk[2048:])
|
||||
if len(chunk) >= 2048
|
||||
else chunk
|
||||
for chunk in self.request.iter_content(CHUNK_SIZE)
|
||||
)
|
||||
|
||||
return self.request.iter_content(chunk_size=1024)
|
||||
|
||||
@property
|
||||
def url(self):
|
||||
"""Return the requested url."""
|
||||
return self.request.url
|
||||
|
||||
def __len__(self) -> int:
|
||||
"""Return the value of the "Content-Length" header.
|
||||
|
||||
:rtype: int
|
||||
"""
|
||||
return self.file_size
|
||||
|
||||
def _create_deezer_decryptor(self, key) -> Blowfish:
|
||||
return Blowfish.new(key, Blowfish.MODE_CBC, b"\x00\x01\x02\x03\x04\x05\x06\x07")
|
||||
|
||||
@staticmethod
|
||||
def _generate_blowfish_key(track_id: str):
|
||||
"""Generate the blowfish key for Deezer downloads.
|
||||
|
||||
:param track_id:
|
||||
:type track_id: str
|
||||
"""
|
||||
SECRET = "g4el58wc0zvf9na1"
|
||||
md5_hash = hashlib.md5(track_id.encode()).hexdigest()
|
||||
# good luck :)
|
||||
return "".join(
|
||||
chr(functools.reduce(lambda x, y: x ^ y, map(ord, t)))
|
||||
for t in zip(md5_hash[:16], md5_hash[16:], SECRET)
|
||||
).encode()
|
||||
|
||||
@staticmethod
|
||||
def _decrypt_chunk(key, data):
|
||||
"""Decrypt a chunk of a Deezer stream.
|
||||
|
||||
:param key:
|
||||
:param data:
|
||||
"""
|
||||
return Blowfish.new(
|
||||
key,
|
||||
Blowfish.MODE_CBC,
|
||||
b"\x00\x01\x02\x03\x04\x05\x06\x07",
|
||||
).decrypt(data)
|
||||
|
||||
|
||||
class DownloadPool:
|
||||
"""Asynchronously download a set of urls."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
urls: Iterable,
|
||||
tempdir: str = None,
|
||||
chunk_callback: Optional[Callable] = None,
|
||||
):
|
||||
self.finished: bool = False
|
||||
# Enumerate urls to know the order
|
||||
self.urls = dict(enumerate(urls))
|
||||
self._downloaded_urls: List[str] = []
|
||||
# {url: path}
|
||||
self._paths: Dict[str, str] = {}
|
||||
self.task: Optional[asyncio.Task] = None
|
||||
|
||||
if tempdir is None:
|
||||
tempdir = gettempdir()
|
||||
self.tempdir = tempdir
|
||||
|
||||
async def getfn(self, url):
|
||||
path = os.path.join(self.tempdir, f"__streamrip_partial_{abs(hash(url))}")
|
||||
self._paths[url] = path
|
||||
return path
|
||||
|
||||
async def _download_urls(self):
|
||||
async with aiohttp.ClientSession() as session:
|
||||
tasks = [
|
||||
asyncio.ensure_future(self._download_url(session, url))
|
||||
for url in self.urls.values()
|
||||
]
|
||||
await asyncio.gather(*tasks)
|
||||
|
||||
async def _download_url(self, session, url):
|
||||
filename = await self.getfn(url)
|
||||
logger.debug("Downloading %s", url)
|
||||
async with session.get(url) as response, aiofiles.open(filename, "wb") as f:
|
||||
# without aiofiles 3.6632679780000004s
|
||||
# with aiofiles 2.504482839s
|
||||
await f.write(await response.content.read())
|
||||
|
||||
if self.callback:
|
||||
self.callback()
|
||||
|
||||
logger.debug("Finished %s", url)
|
||||
|
||||
def download(self, callback=None):
|
||||
self.callback = callback
|
||||
asyncio.run(self._download_urls())
|
||||
|
||||
@property
|
||||
def files(self):
|
||||
if len(self._paths) != len(self.urls):
|
||||
# Not all of them have downloaded
|
||||
raise Exception("Must run DownloadPool.download() before accessing files")
|
||||
|
||||
return [
|
||||
os.path.join(self.tempdir, self._paths[self.urls[i]])
|
||||
for i in range(len(self.urls))
|
||||
]
|
||||
|
||||
def __len__(self):
|
||||
return len(self.urls)
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
logger.debug("Removing tempfiles %s", self._paths)
|
||||
for file in self._paths.values():
|
||||
try:
|
||||
os.remove(file)
|
||||
except FileNotFoundError:
|
||||
pass
|
||||
|
||||
return False
|
35
streamrip/filepath_utils.py
Normal file
35
streamrip/filepath_utils.py
Normal file
|
@ -0,0 +1,35 @@
|
|||
from string import Formatter, printable
|
||||
|
||||
from pathvalidate import sanitize_filename
|
||||
|
||||
|
||||
def clean_filename(fn: str, restrict=False) -> str:
|
||||
path = str(sanitize_filename(fn))
|
||||
if restrict:
|
||||
allowed_chars = set(printable)
|
||||
path = "".join(c for c in path if c in allowed_chars)
|
||||
|
||||
return path
|
||||
|
||||
|
||||
def clean_format(formatter: str, format_info: dict, restrict: bool = False) -> str:
|
||||
"""Format track or folder names sanitizing every formatter key.
|
||||
|
||||
:param formatter:
|
||||
:type formatter: str
|
||||
:param kwargs:
|
||||
"""
|
||||
fmt_keys = filter(None, (i[1] for i in Formatter().parse(formatter)))
|
||||
|
||||
clean_dict = {}
|
||||
for key in fmt_keys:
|
||||
if isinstance(format_info.get(key), (str, float)):
|
||||
clean_dict[key] = clean_filename(str(format_info[key]), restrict=restrict)
|
||||
elif key == "explicit":
|
||||
clean_dict[key] = " (Explicit) " if format_info.get(key, False) else ""
|
||||
elif isinstance(format_info.get(key), int): # track/discnumber
|
||||
clean_dict[key] = f"{format_info[key]:02}"
|
||||
else:
|
||||
clean_dict[key] = "Unknown"
|
||||
|
||||
return formatter.format(**clean_dict)
|
2393
streamrip/media.py
2393
streamrip/media.py
File diff suppressed because it is too large
Load diff
|
@ -6,6 +6,7 @@ import logging
|
|||
import re
|
||||
from collections import OrderedDict
|
||||
from dataclasses import dataclass
|
||||
from string import Formatter
|
||||
from typing import Generator, Hashable, Iterable, Optional, Type, Union
|
||||
|
||||
from .constants import (
|
||||
|
@ -19,7 +20,7 @@ from .constants import (
|
|||
TRACK_KEYS,
|
||||
)
|
||||
from .exceptions import InvalidContainerError, InvalidSourceError
|
||||
from .utils import get_cover_urls, get_quality_id, safe_get
|
||||
from .utils import get_cover_urls, get_quality_id
|
||||
|
||||
logger = logging.getLogger("streamrip")
|
||||
|
||||
|
@ -82,6 +83,9 @@ class TrackMetadata:
|
|||
return cls.from_deezer(album, resp)
|
||||
raise Exception
|
||||
|
||||
def format_track_path(self, formatter: str):
|
||||
pass
|
||||
|
||||
|
||||
@dataclass(slots=True)
|
||||
class TrackInfo:
|
||||
|
@ -219,3 +223,23 @@ class AlbumInfo:
|
|||
booklets = None
|
||||
work: Optional[str] = None
|
||||
|
||||
|
||||
_formatter = Formatter()
|
||||
|
||||
|
||||
def keys_in_format_string(s: str):
|
||||
"""Returns the items in {} in a format string."""
|
||||
return [f[1] for f in _formatter.parse(s) if f[1] is not None]
|
||||
|
||||
|
||||
def safe_get(d: dict, *keys, default=None):
|
||||
"""Nested __getitem__ calls with a default value.
|
||||
|
||||
Use to avoid key not found errors.
|
||||
"""
|
||||
_d = d
|
||||
for k in keys:
|
||||
_d = _d.get(k, {})
|
||||
if _d == {}:
|
||||
return default
|
||||
return _d
|
27
streamrip/progress.py
Normal file
27
streamrip/progress.py
Normal file
|
@ -0,0 +1,27 @@
|
|||
from typing import Optional
|
||||
|
||||
from click import style
|
||||
from tqdm import tqdm
|
||||
|
||||
THEMES = {
|
||||
"plain": None,
|
||||
"dainty": (
|
||||
"{desc} |{bar}| "
|
||||
+ style("{remaining}", fg="magenta")
|
||||
+ " left at "
|
||||
+ style("{rate_fmt}{postfix} ", fg="cyan", bold=True)
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
def get_progress_bar(total, theme="dainty", desc: Optional[str] = None, unit="B"):
|
||||
theme = THEMES[theme]
|
||||
return tqdm(
|
||||
total=total,
|
||||
unit=unit,
|
||||
unit_scale=True,
|
||||
unit_divisor=1024,
|
||||
desc=desc,
|
||||
dynamic_ncols=True,
|
||||
bar_format=theme,
|
||||
)
|
101
streamrip/tagger.py
Normal file
101
streamrip/tagger.py
Normal file
|
@ -0,0 +1,101 @@
|
|||
from dataclasses import dataclass
|
||||
from enum import Enum
|
||||
from typing import Generator
|
||||
|
||||
from mutagen.flac import FLAC, Picture
|
||||
from mutagen.id3 import APIC, ID3, ID3NoHeaderError
|
||||
from mutagen.mp4 import MP4, MP4Cover
|
||||
|
||||
from .metadata import TrackMetadata
|
||||
|
||||
|
||||
class Container(Enum):
|
||||
FLAC = 1
|
||||
AAC = 2
|
||||
MP3 = 3
|
||||
|
||||
def get_mutagen_class(self, path: str):
|
||||
if self == Container.FLAC:
|
||||
return FLAC(path)
|
||||
elif self == Container.AAC:
|
||||
return MP4(path)
|
||||
elif self == Container.MP3:
|
||||
try:
|
||||
return ID3(path)
|
||||
except ID3NoHeaderError:
|
||||
return ID3()
|
||||
# unreachable
|
||||
return {}
|
||||
|
||||
def get_tag_pairs(self, meta) -> Generator:
|
||||
if self == Container.FLAC:
|
||||
return self._tag_flac(meta)
|
||||
elif self == Container.MP3:
|
||||
return self._tag_mp3(meta)
|
||||
elif self == Container.AAC:
|
||||
return self._tag_aac(meta)
|
||||
# unreachable
|
||||
yield
|
||||
|
||||
|
||||
def _tag_flac(self, meta):
|
||||
for k, v in FLAC_KEY.items():
|
||||
tag = getattr(meta, k)
|
||||
if tag:
|
||||
if k in {
|
||||
"tracknumber",
|
||||
"discnumber",
|
||||
"tracktotal",
|
||||
"disctotal",
|
||||
}:
|
||||
tag = f"{int(tag):02}"
|
||||
|
||||
yield (v, str(tag))
|
||||
|
||||
|
||||
def _tag_mp3(self, meta):
|
||||
for k, v in MP3_KEY.items():
|
||||
if k == "tracknumber":
|
||||
text = f"{meta.tracknumber}/{meta.tracktotal}"
|
||||
elif k == "discnumber":
|
||||
text = f"{meta.discnumber}/{meta.disctotal}"
|
||||
else:
|
||||
text = getattr(self, k)
|
||||
|
||||
if text is not None and v is not None:
|
||||
yield (v.__name__, v(encoding=3, text=text))
|
||||
|
||||
def _tag_aac(self, meta):
|
||||
for k, v in MP4_KEY.items():
|
||||
if k == "tracknumber":
|
||||
text = [(meta.tracknumber, meta.tracktotal)]
|
||||
elif k == "discnumber":
|
||||
text = [(meta.discnumber, meta.disctotal)]
|
||||
else:
|
||||
text = getattr(self, k)
|
||||
|
||||
if v is not None and text is not None:
|
||||
yield (v, text)
|
||||
|
||||
|
||||
@dataclass(slots=True)
|
||||
class Tagger:
|
||||
meta: TrackMetadata
|
||||
|
||||
def tag(self, path: str, embed_cover: bool, covers: Cover):
|
||||
ext = path.split(".")[-1].upper()
|
||||
if ext == "flac":
|
||||
container = Container.FLAC
|
||||
elif ext == "m4a":
|
||||
container = Container.AAC
|
||||
elif ext == "mp3":
|
||||
container = Container.MP3
|
||||
else:
|
||||
raise Exception(f"Invalid extension {ext}")
|
||||
|
||||
audio = container.get_mutagen_class(path)
|
||||
tags = container.get_tag_pairs(self.meta)
|
||||
for k, v in tags:
|
||||
audio[k] = v
|
||||
|
||||
c =
|
83
streamrip/track.py
Normal file
83
streamrip/track.py
Normal file
|
@ -0,0 +1,83 @@
|
|||
import os
|
||||
from dataclasses import dataclass
|
||||
|
||||
from . import converter
|
||||
from .client import Client
|
||||
from .config import Config
|
||||
from .downloadable import Downloadable
|
||||
from .media import Media, Pending
|
||||
from .metadata import AlbumMetadata, TrackMetadata
|
||||
from .progress import get_progress_bar
|
||||
|
||||
|
||||
@dataclass(slots=True)
class Track(Media):
    """A single downloadable track.

    Lifecycle: ``preprocess`` -> ``download`` -> ``postprocess``.
    """

    meta: TrackMetadata
    downloadable: Downloadable
    config: Config
    folder: str
    # Set once the file is on disk; updated by _convert() when the
    # extension changes.
    download_path: str = ""

    async def preprocess(self):
        """Create the destination folder and fetch cover art."""
        folder = self._get_folder(self.folder)
        os.makedirs(folder, exist_ok=True)
        # Run in background while track downloads?
        # Don't download again if part of album
        # NOTE(review): _download_cover is not defined in this class —
        # presumably provided by the Media base class; confirm.
        await self._download_cover()

    async def download(self):
        """Stream the audio file to download_path, updating a progress bar."""
        async with get_progress_bar(self.config, self.downloadable.size()) as bar:
            await self.downloadable.download(
                self.download_path, lambda x: bar.update(x)
            )

    async def postprocess(self):
        """Tag the downloaded file, then convert it."""
        await self._tag()
        await self._convert()

    async def _tag(self):
        """Write metadata tags into the downloaded file."""
        t = Tagger(self.meta)
        # NOTE(review): Tagger.tag is declared elsewhere with
        # (path, embed_cover, covers) — this single-argument call looks
        # out of sync with that signature; confirm.
        t.tag(self.download_path)

    async def _convert(self):
        """Convert the downloaded file to the user-configured codec."""
        CONV_CLASS = {
            "FLAC": converter.FLAC,
            "ALAC": converter.ALAC,
            "MP3": converter.LAME,
            "OPUS": converter.OPUS,
            "OGG": converter.Vorbis,
            "VORBIS": converter.Vorbis,
            "AAC": converter.AAC,
            "M4A": converter.AAC,
        }
        c = self.config.session.conversion
        codec = c.codec
        engine = CONV_CLASS[codec.upper()](
            filename=self.download_path,
            sampling_rate=c.sampling_rate,
            remove_source=True,  # always going to delete the old file
        )
        engine.convert()
        self.download_path = engine.final_fn  # because the extension changed

    def _get_folder(self, parent: str) -> str:
        """Return the track's full path under *parent* using the configured
        track_format template.

        Bug fix: the original ignored *parent* and always joined against
        self.folder.
        """
        formatter = self.config.session.filepaths.track_format
        track_path = self.meta.format_track_path(formatter)
        return os.path.join(parent, track_path)
|
||||
|
||||
|
||||
@dataclass(slots=True)
class PendingTrack(Pending):
    """A track known only by its ID; resolve() fetches everything needed
    to build a concrete Track."""

    id: str
    album: AlbumMetadata
    client: Client
    config: Config
    folder: str

    async def resolve(self) -> Track:
        """Fetch metadata and a download handle, returning a ready Track."""
        track_resp = await self.client.get_metadata({"id": self.id}, "track")
        metadata = TrackMetadata.from_resp(self.album, self.client.source, track_resp)
        source_settings = getattr(self.config.session, self.client.source)
        quality = source_settings.quality
        assert isinstance(quality, int)
        dl = await self.client.get_downloadable(self.id, quality)
        return Track(metadata, dl, self.config, self.folder)
|
|
@ -1,476 +0,0 @@
|
|||
"""Miscellaneous utility functions."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import base64
|
||||
import itertools
|
||||
import logging
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import tempfile
|
||||
from string import Formatter
|
||||
from typing import Dict, Hashable, Iterator, List, Optional, Tuple, Union
|
||||
|
||||
import requests
|
||||
from click import secho, style
|
||||
from pathvalidate import sanitize_filename
|
||||
from requests.packages import urllib3
|
||||
from tqdm import tqdm
|
||||
|
||||
from .constants import COVER_SIZES, MAX_FILES_OPEN, TIDAL_COVER_URL
|
||||
from .exceptions import FfmpegError, InvalidQuality, InvalidSourceError
|
||||
|
||||
urllib3.disable_warnings()
|
||||
logger = logging.getLogger("streamrip")
|
||||
|
||||
|
||||
def concat_audio_files(paths: List[str], out: str, ext: str):
    """Concatenate the audio files at *paths* into a single file at *out*.

    ffmpeg's concat protocol is limited by the OS open-file limit, so the
    inputs are joined in batches of at most MAX_FILES_OPEN and the batch
    outputs are then concatenated recursively until one file remains.

    :param paths: input audio files, in playback order
    :param out: destination path of the final concatenated file
    :param ext: file extension (without dot) for temporary batch files
    :raises FfmpegError: if an ffmpeg invocation exits non-zero
    """
    logger.debug("Concatenating %d files", len(paths))
    # Base case: a single file just gets moved into place.
    if len(paths) == 1:
        shutil.move(paths[0], out)
        return

    it = iter(paths)
    # Ceiling division: number of ffmpeg runs needed to stay under the limit.
    num_batches = len(paths) // MAX_FILES_OPEN + (
        1 if len(paths) % MAX_FILES_OPEN != 0 else 0
    )
    logger.debug(
        "Using %d batches with max file limit of %d", num_batches, MAX_FILES_OPEN
    )
    tempdir = tempfile.gettempdir()
    # One temp output per batch, named after the hash of its first input.
    outpaths = [
        os.path.join(
            tempdir, f"__streamrip_ffmpeg_{hash(paths[i*MAX_FILES_OPEN])}.{ext}"
        )
        for i in range(num_batches)
    ]

    for p in outpaths:
        try:
            os.remove(p)  # in case of failure
        except FileNotFoundError:
            pass

    logger.debug("Batch outfiles: %s", outpaths)

    for i in range(num_batches):
        logger.debug("Batch %d", i)
        proc = subprocess.run(
            (
                "ffmpeg",
                "-i",
                # concat protocol: next MAX_FILES_OPEN inputs joined by '|'
                f"concat:{'|'.join(itertools.islice(it, MAX_FILES_OPEN))}",
                "-acodec",
                "copy",
                "-loglevel",
                "panic",
                outpaths[i],
            ),
            # capture_output=True,
        )
        if proc.returncode != 0:
            # NOTE(review): with capture_output disabled, proc.stderr is
            # None, so this error carries no detail — confirm intended.
            raise FfmpegError(proc.stderr)

    # Recurse on the batch outputs until only one file remains.
    concat_audio_files(outpaths, out, ext)
|
||||
|
||||
|
||||
def clean_filename(fn: str, restrict=False) -> str:
    """Sanitize *fn* so it is safe to use as a filename.

    :param fn: candidate filename
    :param restrict: when True, additionally drop any character outside
        printable ASCII
    """
    cleaned = sanitize_filename(fn)
    if restrict:
        from string import printable

        printable_chars = set(printable)
        cleaned = "".join(ch for ch in cleaned if ch in printable_chars)

    return cleaned
|
||||
|
||||
|
||||
# Maps each source name to a table translating the universal quality id
# (0-4) into that source's own quality token.  Value types vary by source:
# plain ints (qobuz), (id, label) pairs (deezer), strings (tidal), and
# bitrates in kbps (deezloader).
__QUALITY_MAP: Dict[str, Dict[int, Union[int, str, Tuple[int, str]]]] = {
    "qobuz": {
        1: 5,
        2: 6,
        3: 7,
        4: 27,
    },
    "deezer": {
        0: (9, "MP3_128"),
        1: (3, "MP3_320"),
        2: (1, "FLAC"),
    },
    "tidal": {
        0: "LOW",  # AAC
        1: "HIGH",  # AAC
        2: "LOSSLESS",  # CD Quality
        3: "HI_RES",  # MQA
    },
    "deezloader": {
        0: 128,
        1: 320,
        2: 1411,
    },
}
|
||||
|
||||
|
||||
def get_quality(quality_id: int, source: str) -> Union[str, int, Tuple[int, str]]:
    """Get the source-specific quality id.

    :param quality_id: the universal quality id (0, 1, 2, 4)
    :param source: qobuz, tidal, or deezer
    :raises KeyError: if the source or quality id is not in the map
    :rtype: Union[str, int]
    """
    source_table = __QUALITY_MAP[source]
    return source_table[quality_id]
|
||||
|
||||
|
||||
def get_quality_id(bit_depth: Optional[int], sampling_rate: Optional[int]):
    """Get the universal quality id from bit depth and sampling rate.

    Lossy input (either value missing) maps to 1; 16-bit to 2; 24-bit to
    3 up to a sampling rate of 96, else 4.  Other bit depths fall through
    and return None implicitly.

    NOTE(review): the 96 threshold here looks like kHz, while
    get_stats_from_quality works in Hz — confirm callers agree.

    :param bit_depth: bits per sample, or None for lossy codecs
    :param sampling_rate: sampling rate, or None for lossy codecs
    """
    # XXX: Should `0` quality be supported?
    if bit_depth is None or sampling_rate is None:
        return 1  # lossy

    if bit_depth == 16:
        return 2

    if bit_depth == 24:
        return 3 if sampling_rate <= 96 else 4
|
||||
|
||||
|
||||
def get_stats_from_quality(
    quality_id: int,
) -> Tuple[Optional[int], Optional[int]]:
    """Get bit depth and sampling rate based on the quality id.

    Lossy qualities (<= 1) have no fixed stats and yield (None, None).

    :param quality_id: universal quality id (0-4)
    :raises InvalidQuality: for ids above 4
    :rtype: Tuple[Optional[int], Optional[int]]
    """
    if quality_id <= 1:
        return (None, None)

    stats = {
        2: (16, 44100),
        3: (24, 96000),
        4: (24, 192000),
    }.get(quality_id)
    if stats is None:
        raise InvalidQuality(quality_id)
    return stats
|
||||
|
||||
|
||||
def clean_format(formatter: str, format_info, restrict: bool = False):
    """Format track or folder names sanitizing every formatter key.

    Each replacement field used in *formatter* is looked up in
    *format_info* and cleaned according to its type before the final
    .format() call; missing/unsupported values become "Unknown".

    :param formatter: a str.format-style template, e.g. "{artist} - {title}"
    :param format_info: mapping of field names to raw values
    :param restrict: restrict sanitized text to printable ASCII
    """
    # Field names referenced by the template (None entries are literal text).
    fmt_keys = filter(None, (i[1] for i in Formatter().parse(formatter)))
    # fmt_keys = (i[1] for i in Formatter().parse(formatter) if i[1] is not None)

    logger.debug("Formatter keys: %s", formatter)

    clean_dict = {}
    for key in fmt_keys:
        logger.debug(repr(key))
        logger.debug(format_info.get(key))
        if isinstance(format_info.get(key), (str, float)):
            logger.debug("1")
            clean_dict[key] = clean_filename(str(format_info[key]), restrict=restrict)
        elif key == "explicit":
            # Must be checked before the int branch: the explicit flag is a
            # bool, and bool is a subclass of int.
            logger.debug("3")
            clean_dict[key] = " (Explicit) " if format_info.get(key, False) else ""
        elif isinstance(format_info.get(key), int):  # track/discnumber
            logger.debug("2")
            # Zero-pad numbers to two digits for stable sorting.
            clean_dict[key] = f"{format_info[key]:02}"
        else:
            clean_dict[key] = "Unknown"

    return formatter.format(**clean_dict)
|
||||
|
||||
|
||||
def tidal_cover_url(uuid, size):
    """Build a Tidal cover-art URL for *uuid* at the requested *size*.

    Returns None when no uuid is available (a common occurrence: a valid
    size but no uuid).

    :param uuid: cover identifier from the Tidal API (may be falsy)
    :param size: image edge length; must be one of the supported sizes
    """
    valid_sizes = (80, 160, 320, 640, 1280)
    assert size in valid_sizes, f"size must be in {valid_sizes}"

    if not uuid:
        return None

    return TIDAL_COVER_URL.format(uuid=uuid.replace("-", "/"), height=size, width=size)
|
||||
|
||||
|
||||
def decrypt_mqa_file(in_path, out_path, encryption_key):
    """Decrypt a Tidal MQA file.

    The per-track security token is decrypted with a fixed master key
    (AES-CBC); the result yields the audio stream key and nonce, which
    decrypt the file itself (AES-CTR).

    :param in_path: path of the encrypted input file
    :param out_path: path the decrypted audio is written to
    :param encryption_key: base64-encoded security token for this track
    """
    try:
        from Crypto.Cipher import AES
        from Crypto.Util import Counter
    except (ImportError, ModuleNotFoundError):
        # pycryptodome is an optional dependency; tell the user how to
        # install it, then bail out.
        secho(
            "To download this item in MQA, you need to run ",
            fg="yellow",
            nl=False,
        )
        secho("pip3 install pycryptodome --upgrade", fg="blue", nl=False)
        secho(".")
        exit()

    # Do not change this
    master_key = "UIlTTEMmmLfGowo/UC60x2H45W6MdGgTRfo/umg4754="

    # Decode the base64 strings to ascii strings
    master_key = base64.b64decode(master_key)
    security_token = base64.b64decode(encryption_key)

    # Get the IV from the first 16 bytes of the securityToken
    iv = security_token[:16]
    encrypted_st = security_token[16:]

    # Initialize decryptor
    decryptor = AES.new(master_key, AES.MODE_CBC, iv)

    # Decrypt the security token
    decrypted_st = decryptor.decrypt(encrypted_st)

    # Get the audio stream decryption key and nonce from the decrypted security token
    key = decrypted_st[:16]
    nonce = decrypted_st[16:24]

    counter = Counter.new(64, prefix=nonce, initial_value=0)
    decryptor = AES.new(key, AES.MODE_CTR, counter=counter)

    # NOTE(review): reads and decrypts the whole file in memory — fine for
    # single tracks, but large files could be chunked.
    with open(in_path, "rb") as enc_file:
        dec_bytes = decryptor.decrypt(enc_file.read())
        with open(out_path, "wb") as dec_file:
            dec_file.write(dec_bytes)
|
||||
|
||||
|
||||
def ext(quality: int, source: str):
    """Get the extension of an audio file.

    Lossy qualities (<= 1) are ".m4a" on Tidal and ".mp3" elsewhere;
    anything higher is ".flac".

    :param quality: universal quality id
    :param source: streaming source name
    """
    if quality > 1:
        return ".flac"
    return ".m4a" if source == "tidal" else ".mp3"
|
||||
|
||||
|
||||
def gen_threadsafe_session(
    headers: Optional[dict] = None, pool_connections: int = 100, pool_maxsize: int = 100
) -> requests.Session:
    """Create a new Requests session with a large poolsize.

    Bug fix: pool_connections and pool_maxsize were previously accepted
    but ignored (the adapter hard-coded 100 for both).

    :param headers: headers to install on the session
    :param pool_connections: number of connection pools to cache
    :param pool_maxsize: maximum connections to keep in each pool
    :rtype: requests.Session
    """
    if headers is None:
        headers = {}

    session = requests.Session()
    adapter = requests.adapters.HTTPAdapter(
        pool_connections=pool_connections, pool_maxsize=pool_maxsize
    )
    session.mount("https://", adapter)
    session.headers.update(headers)
    return session
|
||||
|
||||
|
||||
def decho(message, fg=None):
    """Echo *message* to the terminal and also log it at debug level.

    :param message: text to display
    :param fg: ANSI color with which to display the message on the
        screen
    """
    secho(message, fg=fg)
    logger.debug(message)
|
||||
|
||||
|
||||
def get_container(quality: int, source: str) -> str:
    """Get the file container given the quality.

    `container` can also be the the codec; both work.  CD quality and
    above is always FLAC; lossy is AAC on Tidal and MP3 everywhere else.

    :param quality: quality id
    :param source: streaming source name
    :rtype: str
    """
    if quality >= 2:
        return "FLAC"
    return "AAC" if source == "tidal" else "MP3"
|
||||
|
||||
|
||||
def get_cover_urls(resp: dict, source: str) -> Optional[dict]:
    """Parse a response dict containing cover info according to the source.

    Returns a mapping of size keys to image URLs, or None when the source
    provides no cover (Tidal response without a uuid).

    :param resp: raw API response for an item
    :param source: qobuz, tidal, deezer, or soundcloud
    :raises InvalidSourceError: for any other source
    :rtype: dict
    """

    if source == "qobuz":
        cover_urls = resp["image"]
        # Derive the "original" URL by swapping the 600px suffix for "org".
        cover_urls["original"] = "org".join(cover_urls["large"].rsplit("600", 1))
        return cover_urls

    if source == "tidal":
        uuid = resp["cover"]
        if not uuid:
            return None
        return {
            sk: tidal_cover_url(uuid, size)
            for sk, size in zip(COVER_SIZES, (160, 320, 640, 1280))
        }

    if source == "deezer":
        # Album responses use cover_*; other responses use picture_*.
        resp_keys = ("cover", "cover_medium", "cover_large", "cover_xl")
        resp_keys_fallback = (
            "picture",
            "picture_medium",
            "picture_large",
            "picture_xl",
        )
        cover_urls = {
            sk: resp.get(rk, resp.get(rkf))  # size key, resp key, resp key fallback
            for sk, rk, rkf in zip(
                COVER_SIZES,
                resp_keys,
                resp_keys_fallback,
            )
        }

        # Some responses only provide "cover_big" for the large image.
        if cover_urls["large"] is None and resp.get("cover_big") is not None:
            cover_urls["large"] = resp["cover_big"]

        return cover_urls

    if source == "soundcloud":
        # Fall back to the uploader's avatar; request the 500x500 variant.
        cover_url = (resp["artwork_url"] or resp["user"].get("avatar_url")).replace(
            "large", "t500x500"
        )

        cover_urls = {"large": cover_url}

        return cover_urls

    raise InvalidSourceError(source)
|
||||
|
||||
|
||||
def downsize_image(filepath: str, width: int, height: int):
    """Downsize an image in place, never upscaling.

    If either the width or the height is greater than the image's width or
    height, that dimension will not be changed.  Passing -1 for either
    dimension disables resizing entirely.

    :param filepath: path of the image to resize (overwritten in place)
    :type filepath: str
    :param width: maximum width, or -1 to skip
    :type width: int
    :param height: maximum height, or -1 to skip
    :type height: int
    """
    if width == -1 or height == -1:
        return

    from PIL import Image, UnidentifiedImageError

    try:
        image = Image.open(filepath)
    except UnidentifiedImageError:
        # Not a readable image (e.g. a failed download): best-effort skip.
        secho("Cover art not found, skipping downsize.", fg="red")
        return

    # Never enlarge: clamp target dimensions to the image's own.
    width = min(width, image.width)
    height = min(height, image.height)

    # NOTE(review): resize() stretches to exactly (width, height), which
    # can change the aspect ratio — confirm intended (thumbnail() would
    # preserve it).
    resized_image = image.resize((width, height))
    resized_image.save(filepath)
|
||||
|
||||
|
||||
# Available tqdm bar_format strings, keyed by theme name.
# None means "use tqdm's default format".
TQDM_THEMES = {
    "plain": None,
    "dainty": (
        "{desc} |{bar}| "
        + style("{remaining}", fg="magenta")
        + " left at "
        + style("{rate_fmt}{postfix} ", fg="cyan", bold=True)
    ),
}

# Theme used when the user has not configured one.
TQDM_DEFAULT_THEME = "dainty"

# Currently active bar format; mutated by set_progress_bar_theme().
TQDM_BAR_FORMAT = TQDM_THEMES["dainty"]
|
||||
|
||||
|
||||
def set_progress_bar_theme(theme: str):
    """Set the theme of the tqdm progress bar.

    :param theme: a key of TQDM_THEMES ("plain" or "dainty")
    :type theme: str
    :raises KeyError: if the theme name is unknown
    """
    global TQDM_BAR_FORMAT
    TQDM_BAR_FORMAT = TQDM_THEMES[theme]
|
||||
|
||||
|
||||
def tqdm_stream(iterator, desc: Optional[str] = None) -> Iterator[bytes]:
    """Yield chunks from *iterator* while updating a download progress bar.

    The iterator must support len() so the bar's total can be set up front.

    :param iterator: sized iterable of byte chunks (e.g. DownloadStream)
    :param desc: Description to add for the progress bar
    :type desc: Optional[str]
    :rtype: Iterator
    """
    with get_tqdm_bar(len(iterator), desc=desc) as bar:
        for chunk in iterator:
            bar.update(len(chunk))
            yield chunk
|
||||
|
||||
|
||||
def get_tqdm_bar(total, desc: Optional[str] = None, unit="B"):
    """Build a tqdm bar preconfigured for download progress.

    :param total: total size/count the bar should reach
    :param desc: description shown next to the bar
    :param unit: unit label; defaults to bytes with binary (1024) scaling
    """
    return tqdm(
        total=total,
        unit=unit,
        unit_scale=True,
        unit_divisor=1024,
        desc=desc,
        dynamic_ncols=True,
        bar_format=TQDM_BAR_FORMAT,  # set via set_progress_bar_theme()
    )
|
183
tests/test_config.py
Normal file
183
tests/test_config.py
Normal file
|
@ -0,0 +1,183 @@
|
|||
import shutil
|
||||
|
||||
import pytest
|
||||
|
||||
from streamrip.config import *
|
||||
|
||||
SAMPLE_CONFIG = "tests/test_config.toml"
|
||||
|
||||
|
||||
# Define a fixture to create a sample ConfigData instance for testing
|
||||
@pytest.fixture
def sample_config_data() -> ConfigData:
    """Parse tests/test_config.toml into a fresh ConfigData instance."""
    # Create a sample ConfigData instance here
    # You can customize this to your specific needs for testing
    with open(SAMPLE_CONFIG) as f:
        config_data = ConfigData.from_toml(f.read())
    return config_data
|
||||
|
||||
|
||||
# Define a fixture to create a sample Config instance for testing
|
||||
@pytest.fixture
def sample_config() -> Config:
    """Construct a Config backed by the sample TOML file."""
    # Create a sample Config instance here
    # You can customize this to your specific needs for testing
    config = Config(SAMPLE_CONFIG)
    return config
|
||||
|
||||
|
||||
def test_sample_config_data_properties(sample_config_data):
    # A freshly parsed config must not be flagged as modified.
    assert sample_config_data.modified is False  # Ensure initial state is not modified
|
||||
|
||||
|
||||
def test_sample_config_data_modification(sample_config_data):
    # set_modified() should flip the private _modified flag.
    sample_config_data.set_modified()
    assert sample_config_data._modified is True
|
||||
|
||||
|
||||
def test_sample_config_data_fields(sample_config_data):
    """Field-by-field comparison of the parsed config against a literal
    ConfigData whose values mirror tests/test_config.toml exactly."""
    test_config = ConfigData(
        toml=None,  # type: ignore
        downloads=DownloadsConfig(
            folder="test_folder",
            source_subdirectories=False,
            concurrency=True,
            max_connections=3,
            requests_per_minute=-1,
        ),
        qobuz=QobuzConfig(
            use_auth_token=False,
            email_or_userid="test@gmail.com",
            password_or_token="test_pwd",
            app_id="12345",
            quality=3,
            download_booklets=True,
            secrets=["secret1", "secret2"],
        ),
        tidal=TidalConfig(
            user_id="userid",
            country_code="countrycode",
            access_token="accesstoken",
            refresh_token="refreshtoken",
            token_expiry="tokenexpiry",
            quality=3,
            download_videos=True,
        ),
        deezer=DeezerConfig(
            arl="testarl", quality=2, use_deezloader=True, deezloader_warnings=True
        ),
        soundcloud=SoundcloudConfig(
            client_id="clientid", app_version="appverison", quality=0
        ),
        youtube=YoutubeConfig(
            video_downloads_folder="videodownloadsfolder",
            quality=0,
            download_videos=False,
        ),
        lastfm=LastFmConfig(source="qobuz", fallback_source="deezer"),
        filepaths=FilepathsConfig(
            add_singles_to_folder=False,
            folder_format="{albumartist} - {title} ({year}) [{container}] [{bit_depth}B-{sampling_rate}kHz]",
            track_format="{tracknumber}. {artist} - {title}{explicit}",
            restrict_characters=False,
            truncate=True,
        ),
        artwork=ArtworkConfig(
            embed=True, size="large", max_width=-1, max_height=-1, keep_hires_cover=True
        ),
        metadata=MetadataConfig(
            set_playlist_to_album=True, new_playlist_tracknumbers=True, exclude=[]
        ),
        qobuz_filters=QobuzDiscographyFilterConfig(
            extras=False,
            repeats=False,
            non_albums=False,
            features=False,
            non_studio_albums=False,
            non_remaster=False,
        ),
        theme=ThemeConfig(progress_bar="dainty"),
        database=DatabaseConfig(
            downloads_enabled=True,
            downloads_path="downloadspath",
            failed_downloads_enabled=True,
            failed_downloads_path="faileddownloadspath",
        ),
        conversion=ConversionConfig(
            enabled=False,
            codec="ALAC",
            sampling_rate=48000,
            bit_depth=24,
            lossy_bitrate=320,
        ),
        _modified=False,
    )
    # Compare section by section so a failure pinpoints the broken section.
    assert sample_config_data.downloads == test_config.downloads
    assert sample_config_data.qobuz == test_config.qobuz
    assert sample_config_data.tidal == test_config.tidal
    assert sample_config_data.deezer == test_config.deezer
    assert sample_config_data.soundcloud == test_config.soundcloud
    assert sample_config_data.youtube == test_config.youtube
    assert sample_config_data.lastfm == test_config.lastfm
    assert sample_config_data.artwork == test_config.artwork
    assert sample_config_data.filepaths == test_config.filepaths
    assert sample_config_data.metadata == test_config.metadata
    assert sample_config_data.qobuz_filters == test_config.qobuz_filters
    assert sample_config_data.theme == test_config.theme
    assert sample_config_data.database == test_config.database
    assert sample_config_data.conversion == test_config.conversion
|
||||
|
||||
|
||||
def test_config_save_file_called_on_del(sample_config, mocker):
    """__del__ must persist the config when it was marked modified."""
    sample_config.file.set_modified()
    mockf = mocker.Mock()

    # Patch save_file so __del__ doesn't touch the real file.
    sample_config.save_file = mockf
    sample_config.__del__()
    mockf.assert_called_once()
|
||||
|
||||
|
||||
def test_config_update_on_save():
    """save_file() must write modified values back to disk."""
    # NOTE(review): `os` has no visible import in this file — it seems to
    # arrive via `from streamrip.config import *`; add an explicit import.
    tmp_config_path = "tests/config2.toml"
    shutil.copy(SAMPLE_CONFIG, tmp_config_path)
    conf = Config(tmp_config_path)
    conf.file.downloads.folder = "new_folder"
    conf.file.set_modified()
    conf.save_file()
    conf2 = Config(tmp_config_path)
    os.remove(tmp_config_path)

    assert conf2.session.downloads.folder == "new_folder"
|
||||
|
||||
|
||||
def test_config_update_on_del():
    """Deleting a modified Config must write its values back to disk."""
    tmp_config_path = "tests/config2.toml"
    shutil.copy(SAMPLE_CONFIG, tmp_config_path)
    conf = Config(tmp_config_path)
    conf.file.downloads.folder = "new_folder"
    conf.file.set_modified()
    del conf
    conf2 = Config(tmp_config_path)
    os.remove(tmp_config_path)

    assert conf2.session.downloads.folder == "new_folder"
|
||||
|
||||
|
||||
def test_config_dont_update_without_set_modified():
    """Without set_modified(), changes must NOT be written back to disk."""
    tmp_config_path = "tests/config2.toml"
    shutil.copy(SAMPLE_CONFIG, tmp_config_path)
    conf = Config(tmp_config_path)
    conf.file.downloads.folder = "new_folder"
    del conf
    conf2 = Config(tmp_config_path)
    os.remove(tmp_config_path)

    # "test_folder" is the value from the untouched sample config.
    assert conf2.session.downloads.folder == "test_folder"
|
||||
|
||||
|
||||
# Other tests for the Config class can be added as needed
|
||||
|
||||
# Allow running this file directly, without the pytest CLI.
if __name__ == "__main__":
    pytest.main()
|
178
tests/test_config.toml
Normal file
178
tests/test_config.toml
Normal file
|
@ -0,0 +1,178 @@
|
|||
[downloads]
|
||||
# Folder where tracks are downloaded to
|
||||
folder = "test_folder"
|
||||
# Put Qobuz albums in a 'Qobuz' folder, Tidal albums in 'Tidal' etc.
|
||||
source_subdirectories = false
|
||||
|
||||
# Download (and convert) tracks all at once, instead of sequentially.
|
||||
# If you are converting the tracks, or have fast internet, this will
|
||||
# substantially improve processing speed.
|
||||
concurrency = true
|
||||
# The maximum number of tracks to download at once
|
||||
# If you have very fast internet, you will benefit from a higher value,
|
||||
# A value that is too high for your bandwidth may cause slowdowns
|
||||
max_connections = 3
|
||||
# Max number of API requests to handle per minute
|
||||
# Set to -1 for no limit
|
||||
requests_per_minute = -1
|
||||
|
||||
[qobuz]
|
||||
# 1: 320kbps MP3, 2: 16/44.1, 3: 24/<=96, 4: 24/>=96
|
||||
quality = 3
|
||||
# This will download booklet pdfs that are included with some albums
|
||||
download_booklets = true
|
||||
|
||||
# Authenticate to Qobuz using auth token? Value can be true/false only
|
||||
use_auth_token = false
|
||||
# Enter your userid if the above use_auth_token is set to true, else enter your email
|
||||
email_or_userid = "test@gmail.com"
|
||||
# Enter your auth token if the above use_auth_token is set to true, else enter the md5 hash of your plaintext password
|
||||
password_or_token = "test_pwd"
|
||||
# Do not change
|
||||
app_id = "12345"
|
||||
# Do not change
|
||||
secrets = ['secret1', 'secret2']
|
||||
|
||||
[tidal]
|
||||
# 0: 256kbps AAC, 1: 320kbps AAC, 2: 16/44.1 "HiFi" FLAC, 3: 24/44.1 "MQA" FLAC
|
||||
quality = 3
|
||||
# This will download videos included in Video Albums.
|
||||
download_videos = true
|
||||
|
||||
# Do not change any of the fields below
|
||||
user_id = "userid"
|
||||
country_code = "countrycode"
|
||||
access_token = "accesstoken"
|
||||
refresh_token = "refreshtoken"
|
||||
# Tokens last 1 week after refresh. This is the Unix timestamp of the expiration
|
||||
# time. If you haven't used streamrip in more than a week, you may have to log
|
||||
# in again using `rip config --tidal`
|
||||
token_expiry = "tokenexpiry"
|
||||
|
||||
[deezer]
|
||||
# 0, 1, or 2
|
||||
# This only applies to paid Deezer subscriptions. Those using deezloader
|
||||
# are automatically limited to quality = 1
|
||||
quality = 2
|
||||
# An authentication cookie that allows streamrip to use your Deezer account
|
||||
# See https://github.com/nathom/streamrip/wiki/Finding-Your-Deezer-ARL-Cookie
|
||||
# for instructions on how to find this
|
||||
arl = "testarl"
|
||||
# This allows for free 320kbps MP3 downloads from Deezer
|
||||
# If an arl is provided, deezloader is never used
|
||||
use_deezloader = true
|
||||
# This warns you when the paid deezer account is not logged in and rip falls
|
||||
# back to deezloader, which is unreliable
|
||||
deezloader_warnings = true
|
||||
|
||||
[soundcloud]
|
||||
# Only 0 is available for now
|
||||
quality = 0
|
||||
# This changes periodically, so it needs to be updated
|
||||
client_id = "clientid"
|
||||
app_version = "appverison"
|
||||
|
||||
[youtube]
|
||||
# Only 0 is available for now
|
||||
quality = 0
|
||||
# Download the video along with the audio
|
||||
download_videos = false
|
||||
# The path to download the videos to
|
||||
video_downloads_folder = "videodownloadsfolder"
|
||||
|
||||
# This stores a list of item IDs so that repeats are not downloaded.
|
||||
[database]
|
||||
downloads_enabled = true
|
||||
downloads_path = "downloadspath"
|
||||
|
||||
# If a download fails, the item ID is stored here. Then, `rip repair` can be
|
||||
# called to retry the downloads
|
||||
failed_downloads_enabled = true
|
||||
failed_downloads_path = "faileddownloadspath"
|
||||
|
||||
# Convert tracks to a codec after downloading them.
|
||||
[conversion]
|
||||
enabled = false
|
||||
# FLAC, ALAC, OPUS, MP3, VORBIS, or AAC
|
||||
codec = "ALAC"
|
||||
# In Hz. Tracks are downsampled if their sampling rate is greater than this.
|
||||
# Value of 48000 is recommended to maximize quality and minimize space
|
||||
sampling_rate = 48000
|
||||
# Only 16 and 24 are available. It is only applied when the bit depth is higher
|
||||
# than this value.
|
||||
bit_depth = 24
|
||||
# Only applicable for lossy codecs
|
||||
lossy_bitrate = 320
|
||||
|
||||
# Filter a Qobuz artist's discography. Set to 'true' to turn on a filter.
|
||||
[qobuz_filters]
|
||||
# Remove Collectors Editions, live recordings, etc.
|
||||
extras = false
|
||||
# Picks the highest quality out of albums with identical titles.
|
||||
repeats = false
|
||||
# Remove EPs and Singles
|
||||
non_albums = false
|
||||
# Remove albums whose artist is not the one requested
|
||||
features = false
|
||||
# Skip non studio albums
|
||||
non_studio_albums = false
|
||||
# Only download remastered albums
|
||||
non_remaster = false
|
||||
|
||||
[artwork]
|
||||
# Write the image to the audio file
|
||||
embed = true
|
||||
# The size of the artwork to embed. Options: thumbnail, small, large, original.
|
||||
# "original" images can be up to 30MB, and may fail embedding.
|
||||
# Using "large" is recommended.
|
||||
size = "large"
|
||||
# Both of these options limit the size of the embedded artwork. If their values
|
||||
# are larger than the actual dimensions of the image, they will be ignored.
|
||||
# If either value is -1, the image is left untouched.
|
||||
max_width = -1
|
||||
max_height = -1
|
||||
# Save the cover image at the highest quality as a separate jpg file
|
||||
keep_hires_cover = true
|
||||
|
||||
[metadata]
|
||||
# Sets the value of the 'ALBUM' field in the metadata to the playlist's name.
|
||||
# This is useful if your music library software organizes tracks based on album name.
|
||||
set_playlist_to_album = true
|
||||
# Replaces the original track's tracknumber with its position in the playlist
|
||||
new_playlist_tracknumbers = true
|
||||
# The following metadata tags won't be applied
|
||||
# See https://github.com/nathom/streamrip/wiki/Metadata-Tag-Names for more info
|
||||
exclude = []
|
||||
|
||||
# Changes the folder and file names generated by streamrip.
|
||||
[filepaths]
|
||||
# Create folders for single tracks within the downloads directory using the folder_format
|
||||
# template
|
||||
add_singles_to_folder = false
|
||||
# Available keys: "albumartist", "title", "year", "bit_depth", "sampling_rate",
|
||||
# "container", "id", and "albumcomposer"
|
||||
folder_format = "{albumartist} - {title} ({year}) [{container}] [{bit_depth}B-{sampling_rate}kHz]"
|
||||
# Available keys: "tracknumber", "artist", "albumartist", "composer", "title",
|
||||
# and "albumcomposer"
|
||||
track_format = "{tracknumber}. {artist} - {title}{explicit}"
|
||||
# Only allow printable ASCII characters in filenames.
|
||||
restrict_characters = false
|
||||
# Truncate the filename if it is greater than 120 characters
|
||||
# Setting this to false may cause downloads to fail on some systems
|
||||
truncate = true
|
||||
|
||||
# Last.fm playlists are downloaded by searching for the titles of the tracks
|
||||
[lastfm]
|
||||
# The source on which to search for the tracks.
|
||||
source = "qobuz"
|
||||
# If no results were found with the primary source, the item is searched for
|
||||
# on this one.
|
||||
fallback_source = "deezer"
|
||||
|
||||
[theme]
|
||||
# Options: "dainty" or "plain"
|
||||
progress_bar = "dainty"
|
||||
|
||||
[misc]
|
||||
# Metadata to identify this config file. Do not change.
|
||||
version = "2.0"
|
|
@ -1,20 +0,0 @@
|
|||
import os
|
||||
import time
|
||||
from pprint import pprint
|
||||
|
||||
from streamrip.downloadtools import DownloadPool
|
||||
|
||||
|
||||
def test_downloadpool(tmpdir):
    """Smoke-test DownloadPool against live PokeAPI URLs (needs network)."""
    start = time.perf_counter()
    with DownloadPool(
        (f"https://pokeapi.co/api/v2/pokemon/{number}" for number in range(1, 151)),
        tempdir=tmpdir,
    ) as pool:
        pool.download()
        # NOTE(review): range(1, 151) yields 150 URLs, yet 151 files are
        # expected here — looks off by one; confirm which is intended.
        assert len(os.listdir(tmpdir)) == 151

    # the tempfiles should be removed at this point
    assert len(os.listdir(tmpdir)) == 0

    print(f"Finished in {time.perf_counter() - start}s")
|
119
tests/tests.py
119
tests/tests.py
|
@ -1,119 +0,0 @@
|
|||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
|
||||
from click import echo, secho
|
||||
|
||||
# One known-good album/playlist URL per source, downloaded by the
# end-to-end smoke test below.
test_urls = {
    "qobuz": "https://www.qobuz.com/us-en/album/blackest-blue-morcheeba/h4nngz0wgqesc",
    "tidal": "https://tidal.com/browse/album/183284294",
    "deezer": "https://www.deezer.com/us/album/225281222",
    "soundcloud": "https://soundcloud.com/dj-khaled/sets/khaled-khaled",
}
|
||||
|
||||
|
||||
def reset_config():
    """Back up the user's rip config and regenerate a fresh one.

    Stores the config path and backup path in module globals so
    restore_config() can undo this later.
    """
    global cfg_path
    global new_cfg_path

    # Ask the rip CLI where its config file lives.
    p = subprocess.Popen(
        ["rip", "config", "-p"], stdout=subprocess.PIPE, stderr=subprocess.PIPE
    )
    out, err = p.communicate()
    cfg_path = out.decode("utf-8").strip()
    # cfg_path = re.search(
    #     r"(/[\w\d\s]+(?:/[\w\d \.]+)*)", out.decode("utf-8")
    # ).group(1)
    new_cfg_path = f"{cfg_path}.tmp"
    shutil.copy(cfg_path, new_cfg_path)
    # NOTE(review): Popen is not waited on — the config update may still
    # be running when this returns; confirm intended.
    subprocess.Popen(["rip", "config", "--update"])
|
||||
|
||||
|
||||
def restore_config():
    """Put the user's original config back (inverse of reset_config)."""
    # Relies on the globals set by reset_config(); calling this first
    # would raise NameError.
    global cfg_path
    global new_cfg_path

    os.remove(cfg_path)
    shutil.move(new_cfg_path, cfg_path)
|
||||
|
||||
|
||||
def download_albums():
    """Download every URL in test_urls via the rip CLI, then echo results.

    NOTE(review): subprocess.run blocks, so despite the "procs" list the
    downloads run sequentially, not in parallel — confirm intended.
    """
    rip_url = ["rip", "-nd", "-u"]
    procs = []
    for url in test_urls.values():
        procs.append(subprocess.run([*rip_url, url]))

    for p in procs:
        echo(p)
|
||||
|
||||
|
||||
def check_album_dl_success(folder, correct):
    """Report whether *folder* contains exactly the expected file names.

    :param folder: downloaded album directory to inspect
    :param correct: iterable of expected file names
    """
    if set(os.listdir(folder)) != set(correct):
        secho(f"Check for {folder} failed!", fg="red")
    else:
        secho(f"Check for {folder} succeeded!", fg="green")
|
||||
|
||||
|
||||
def main():
    """Run the end-to-end download smoke test.

    Backs up the user's config, downloads one album per streaming source,
    verifies that each download folder contains exactly the expected
    files, then restores the original config.
    """
    reset_config()
    download_albums()
    # NOTE(review): the expected paths below are hard-coded to the original
    # author's machine and download settings; these checks only pass there.
    check_album_dl_success(
        "/Users/nathan/StreamripDownloads/Morcheeba - Blackest Blue (2021) [FLAC] [24B-44.1kHz]",
        {
            "04. Morcheeba - Say It's Over.flac",
            "01. Morcheeba - Cut My Heart Out.flac",
            "02. Morcheeba - Killed Our Love.flac",
            "07. Morcheeba - Namaste.flac",
            "03. Morcheeba - Sounds Of Blue.flac",
            "10. Morcheeba - The Edge Of The World.flac",
            "08. Morcheeba - The Moon.flac",
            "09. Morcheeba - Falling Skies.flac",
            "cover.jpg",
            "05. Morcheeba - Sulphur Soul.flac",
            "06. Morcheeba - Oh Oh Yeah.flac",
        },
    )

    check_album_dl_success(
        "/Users/nathan/StreamripDownloads/KHALED KHALED",
        {
            "05. DJ Khaled - I DID IT (feat. Post Malone, Megan Thee Stallion, Lil Baby & DaBaby).mp3",
            "09. DJ Khaled - THIS IS MY YEAR (feat. A Boogie Wit Da Hoodie, Big Sean, Rick Ross & Puff Daddy).mp3",
            "01. DJ Khaled - THANKFUL (feat. Lil Wayne & Jeremih).mp3",
            "12. DJ Khaled - I CAN HAVE IT ALL (feat. Bryson Tiller, H.E.R. & Meek Mill).mp3",
            "02. DJ Khaled - EVERY CHANCE I GET (feat. Lil Baby & Lil Durk).mp3",
            "08. DJ Khaled - POPSTAR (feat. Drake).mp3",
            "13. DJ Khaled - GREECE (feat. Drake).mp3",
            "04. DJ Khaled - WE GOING CRAZY (feat. H.E.R. & Migos).mp3",
            "10. DJ Khaled - SORRY NOT SORRY (Harmonies by The Hive) [feat. Nas, JAY-Z & James Fauntleroy].mp3",
            "03. DJ Khaled - BIG PAPER (feat. Cardi B).mp3",
            "14. DJ Khaled - WHERE YOU COME FROM (feat. Buju Banton, Capleton & Bounty Killer).mp3",
            "07. DJ Khaled - BODY IN MOTION (feat. Bryson Tiller, Lil Baby & Roddy Ricch).mp3",
            "06. DJ Khaled - LET IT GO (feat. Justin Bieber & 21 Savage).mp3",
            "11. DJ Khaled - JUST BE (feat. Justin Timberlake).mp3",
        },
    )

    check_album_dl_success(
        "/Users/nathan/StreamripDownloads/Paul Weller - Fat Pop (2021) [FLAC] [24B-44.1kHz]",
        {
            "01. Paul Weller - Cosmic Fringes.flac",
            "11. Paul Weller - In Better Times.flac",
            "05. Paul Weller - Glad Times.flac",
            "08. Paul Weller - That Pleasure.flac",
            "04. Paul Weller - Shades Of Blue.flac",
            "12. Paul Weller - Still Glides The Stream.flac",
            "03. Paul Weller - Fat Pop.flac",
            "cover.jpg",
            "02. Paul Weller - True.flac",
            "09. Paul Weller - Failed.flac",
            "06. Paul Weller - Cobweb Connections.flac",
            "10. Paul Weller - Moving Canvas.flac",
            "07. Paul Weller - Testify.flac",
        },
    )
    # Put the user's real config back even though the checks above only
    # print results (they never raise).
    restore_config()
|
||||
|
||||
|
||||
# Run the end-to-end download checks when executed as a script.
if __name__ == "__main__":
    main()
|
Loading…
Add table
Add a link
Reference in a new issue