mirror of https://github.com/nathom/streamrip.git (synced 2025-05-13 22:54:55 -04:00)
Formatting

commit 35c8932ffb (parent 1f3b24e5b7)
12 changed files with 151 additions and 363 deletions
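Nearly every hunk below follows one mechanical pattern, consistent with an autoformatter pass such as Black with its default 88-character line limit (an assumption; the commit message says only "Formatting"): calls and conditions that were manually wrapped for a narrower limit are collapsed onto a single line wherever they now fit. A minimal sketch of the pattern, with hypothetical values:

    # Before: manually wrapped to fit a narrower (~79-column) limit
    message = " - ".join(
        ["error", "details"]
    )

    # After: collapsed onto one line because it fits within 88 columns
    message = " - ".join(["error", "details"])

A few hunks also carry small behavioral changes; those are noted where they appear.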
rg | 0 (new normal file)
rip/cli.py | 44
@@ -86,9 +86,7 @@ class DownloadCommand(Command):
         if len(core) > 0:
             core.download()
         elif not urls and path is None:
-            self.line(
-                "<error>Must pass arguments. See </><cmd>rip url -h</cmd>."
-            )
+            self.line("<error>Must pass arguments. See </><cmd>rip url -h</cmd>.")

         update_check.join()
         if outdated:

@@ -115,16 +113,10 @@ class DownloadCommand(Command):
                 "https://api.github.com/repos/nathom/streamrip/releases/latest"
             ).json()["body"]

-            release_notes = md_header.sub(
-                r"<header>\1</header>", release_notes
-            )
-            release_notes = bullet_point.sub(
-                r"<options=bold>•</> \1", release_notes
-            )
+            release_notes = md_header.sub(r"<header>\1</header>", release_notes)
+            release_notes = bullet_point.sub(r"<options=bold>•</> \1", release_notes)
             release_notes = code.sub(r"<cmd>\1</cmd>", release_notes)
-            release_notes = issue_reference.sub(
-                r"<options=bold>\1</>", release_notes
-            )
+            release_notes = issue_reference.sub(r"<options=bold>\1</>", release_notes)

             self.line(release_notes)

@@ -154,9 +146,7 @@ class SearchCommand(Command):

     def handle(self):
         query = self.argument("query")
-        source, type = clean_options(
-            self.option("source"), self.option("type")
-        )
+        source, type = clean_options(self.option("source"), self.option("type"))

         config = Config()
         core = RipCore(config)

@@ -219,18 +209,14 @@ class DiscoverCommand(Command):
             from streamrip.constants import QOBUZ_FEATURED_KEYS

             if chosen_list not in QOBUZ_FEATURED_KEYS:
-                self.line(
-                    f'<error>Error: list "{chosen_list}" not available</error>'
-                )
+                self.line(f'<error>Error: list "{chosen_list}" not available</error>')
                 self.line(self.help)
                 return 1
         elif source == "deezer":
             from streamrip.constants import DEEZER_FEATURED_KEYS

             if chosen_list not in DEEZER_FEATURED_KEYS:
-                self.line(
-                    f'<error>Error: list "{chosen_list}" not available</error>'
-                )
+                self.line(f'<error>Error: list "{chosen_list}" not available</error>')
                 self.line(self.help)
                 return 1

@@ -318,9 +304,7 @@ class ConfigCommand(Command):
         self.line(f"<info>{CONFIG_PATH}</info>")

         if self.option("open"):
-            self.line(
-                f"Opening <url>{CONFIG_PATH}</url> in default application"
-            )
+            self.line(f"Opening <url>{CONFIG_PATH}</url> in default application")
             launch(CONFIG_PATH)

         if self.option("reset"):

@@ -367,9 +351,7 @@ class ConfigCommand(Command):
                 self.line("<b>Sucessfully logged in!</b>")

             except AuthenticationError:
-                self.line(
-                    "<error>Could not log in. Double check your ARL</error>"
-                )
+                self.line("<error>Could not log in. Double check your ARL</error>")

         if self.option("qobuz"):
             import getpass

@@ -377,9 +359,7 @@ class ConfigCommand(Command):

             self._config.file["qobuz"]["email"] = self.ask("Qobuz email:")
             self._config.file["qobuz"]["password"] = hashlib.md5(
-                getpass.getpass(
-                    "Qobuz password (won't show on screen): "
-                ).encode()
+                getpass.getpass("Qobuz password (won't show on screen): ").encode()
             ).hexdigest()
             self._config.save()

@@ -631,9 +611,7 @@ class Application(BaseApplication):
         formatter.set_style("path", Style("green", options=["bold"]))
         formatter.set_style("cmd", Style("magenta"))
         formatter.set_style("title", Style("yellow", options=["bold"]))
-        formatter.set_style(
-            "header", Style("yellow", options=["bold", "underline"])
-        )
+        formatter.set_style("header", Style("yellow", options=["bold", "underline"]))
         io.output.set_formatter(formatter)
         io.error_output.set_formatter(formatter)
@@ -31,9 +31,7 @@ class Config:
         values.
         """

-        default_config_path = os.path.join(
-            os.path.dirname(__file__), "config.toml"
-        )
+        default_config_path = os.path.join(os.path.dirname(__file__), "config.toml")

         with open(default_config_path) as cfg:
             defaults: Dict[str, Any] = tomlkit.parse(cfg.read().strip())

@@ -57,10 +55,7 @@ class Config:

         if os.path.isfile(self._path):
             self.load()
-            if (
-                self.file["misc"]["version"]
-                != self.defaults["misc"]["version"]
-            ):
+            if self.file["misc"]["version"] != self.defaults["misc"]["version"]:
                 secho(
                     "Updating config file to new version. Some settings may be lost.",
                     fg="yellow",
rip/core.py | 115
@@ -112,18 +112,14 @@ class RipCore(list):
         else:
             self.config = config

-        if (
-            theme := self.config.file["theme"]["progress_bar"]
-        ) != TQDM_DEFAULT_THEME:
+        if (theme := self.config.file["theme"]["progress_bar"]) != TQDM_DEFAULT_THEME:
             set_progress_bar_theme(theme.lower())

         def get_db(db_type: str) -> db.Database:
             db_settings = self.config.session["database"]
             db_class = db.CLASS_MAP[db_type]

-            if db_settings[db_type]["enabled"] and db_settings.get(
-                "enabled", True
-            ):
+            if db_settings[db_type]["enabled"] and db_settings.get("enabled", True):
                 default_db_path = DB_PATH_MAP[db_type]
                 path = db_settings[db_type]["path"]

@@ -218,8 +214,7 @@ class RipCore(list):
         logger.debug(session)
         # So that the dictionary isn't searched for the same keys multiple times
         artwork, conversion, filepaths, metadata = (
-            session[key]
-            for key in ("artwork", "conversion", "filepaths", "metadata")
+            session[key] for key in ("artwork", "conversion", "filepaths", "metadata")
         )
         concurrency = session["downloads"]["concurrency"]
         return {

@@ -265,9 +260,7 @@ class RipCore(list):
             )
             exit()

-        for counter, (source, media_type, item_id) in enumerate(
-            self.failed_db
-        ):
+        for counter, (source, media_type, item_id) in enumerate(self.failed_db):
             if counter >= max_items:
                 break

@@ -290,9 +283,7 @@ class RipCore(list):

         logger.debug("Arguments from config: %s", arguments)

-        source_subdirs = self.config.session["downloads"][
-            "source_subdirectories"
-        ]
+        source_subdirs = self.config.session["downloads"]["source_subdirectories"]
         for item in self:
             # Item already checked in database in handle_urls
             if source_subdirs:

@@ -304,26 +295,20 @@ class RipCore(list):
                 item.download(**arguments)
                 continue

-            arguments["quality"] = self.config.session[item.client.source][
-                "quality"
-            ]
+            arguments["quality"] = self.config.session[item.client.source]["quality"]
             if isinstance(item, Artist):
                 filters_ = tuple(
                     k for k, v in self.config.session["filters"].items() if v
                 )
                 arguments["filters"] = filters_
-                logger.debug(
-                    "Added filter argument for artist/label: %s", filters_
-                )
+                logger.debug("Added filter argument for artist/label: %s", filters_)

             if not isinstance(item, Tracklist) or not item.loaded:
                 logger.debug("Loading metadata")
                 try:
                     item.load_meta(**arguments)
                 except NonStreamable:
-                    self.failed_db.add(
-                        (item.client.source, item.type, item.id)
-                    )
+                    self.failed_db.add((item.client.source, item.type, item.id))
                     secho(f"{item!s} is not available, skipping.", fg="red")
                     continue

@@ -360,9 +345,7 @@ class RipCore(list):
         :param featured_list: The name of the list. See `rip discover --help`.
         :type featured_list: str
         """
-        self.extend(
-            self.search("qobuz", featured_list, "featured", limit=max_items)
-        )
+        self.extend(self.search("qobuz", featured_list, "featured", limit=max_items))

     def get_client(self, source: str) -> Client:
         """Get a client given the source and log in.
@@ -427,6 +410,17 @@ class RipCore(list):
                     self.config.file["qobuz"]["secrets"],
                 ) = client.get_tokens()
                 self.config.save()
+            elif (
+                client.source == "soundcloud"
+                and not creds.get("client_id")
+                and not creds.get("app_version")
+            ):
+                (
+                    self.config.file["soundcloud"]["client_id"],
+                    self.config.file["soundcloud"]["app_version"],
+                ) = client.get_tokens()
+                self.config.save()
+
             elif client.source == "tidal":
                 self.config.file["tidal"].update(client.get_tokens())
                 self.config.save()  # only for the expiry stamp
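Unlike its neighbors, this hunk changes behavior rather than layout: when the config holds no SoundCloud client_id/app_version, the freshly fetched tokens are persisted, mirroring the Qobuz branch above it. A condensed sketch of the caching pattern (hypothetical paraphrase; `get_tokens()` is assumed here to return a `(client_id, app_version)` pair for this source, which the tuple unpacking in the diff implies):

    # Hypothetical condensed form of the added branch
    if client.source == "soundcloud" and not creds.get("client_id") and not creds.get("app_version"):
        cid, version = client.get_tokens()  # fetch fresh credentials once
        config.file["soundcloud"]["client_id"] = cid
        config.file["soundcloud"]["app_version"] = version
        config.save()  # reuse the cached values on the next run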
@@ -435,14 +429,14 @@ class RipCore(list):
         """Return the type of the url and the id.

         Compatible with urls of the form:
-            https://www.qobuz.com/us-en/{type}/{name}/{id}
-            https://open.qobuz.com/{type}/{id}
-            https://play.qobuz.com/{type}/{id}
+            https://www.qobuz.com/us-en/type/name/id
+            https://open.qobuz.com/type/id
+            https://play.qobuz.com/type/id

-            https://www.deezer.com/us/{type}/{id}
-            https://tidal.com/browse/{type}/{id}
+            https://www.deezer.com/us/type/id
+            https://tidal.com/browse/type/id

-        :raises exceptions.ParsingError
+        :raises exceptions.ParsingError:
         """
         parsed: List[Tuple[str, str, str]] = []
@@ -468,19 +462,24 @@ class RipCore(list):
                     fg="yellow",
                 )
             parsed.extend(
-                ("deezer", *extract_deezer_dynamic_link(url))
-                for url in dynamic_urls
+                ("deezer", *extract_deezer_dynamic_link(url)) for url in dynamic_urls
             )

-        parsed.extend(URL_REGEX.findall(url))  # Qobuz, Tidal, Dezer
+        parsed.extend(URL_REGEX.findall(url))  # Qobuz, Tidal, Deezer
         soundcloud_urls = SOUNDCLOUD_URL_REGEX.findall(url)
-        soundcloud_items = [
-            self.clients["soundcloud"].get(u) for u in soundcloud_urls
-        ]
+        if soundcloud_urls:
+            soundcloud_client = self.get_client("soundcloud")
+            assert isinstance(soundcloud_client, SoundCloudClient)  # for typing

-        parsed.extend(
-            ("soundcloud", item["kind"], url)
-            for item, url in zip(soundcloud_items, soundcloud_urls)
-        )
+            # TODO: Make this async
+            soundcloud_items = (
+                soundcloud_client.resolve_url(u) for u in soundcloud_urls
+            )
+
+            parsed.extend(
+                ("soundcloud", item["kind"], str(item["id"]))
+                for item in soundcloud_items
+            )

         logger.debug("Parsed urls: %s", parsed)
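This hunk is also behavioral, not cosmetic: rather than resolving every SoundCloud URL through a preexisting `self.clients["soundcloud"]` entry, a logged-in client is now obtained lazily via `get_client("soundcloud")` only when SoundCloud URLs are actually present, items are resolved with `resolve_url` (still synchronously, per the TODO), and the parsed triple stores the item's id as a string instead of the raw URL. The `Dezer` to `Deezer` comment typo fix rides along.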
@@ -507,15 +506,11 @@ class RipCore(list):

         # For testing:
         # https://www.last.fm/user/nathan3895/playlists/12058911
-        user_regex = re.compile(
-            r"https://www\.last\.fm/user/([^/]+)/playlists/\d+"
-        )
+        user_regex = re.compile(r"https://www\.last\.fm/user/([^/]+)/playlists/\d+")
         lastfm_urls = LASTFM_URL_REGEX.findall(urls)
         try:
             lastfm_source = self.config.session["lastfm"]["source"]
-            lastfm_fallback_source = self.config.session["lastfm"][
-                "fallback_source"
-            ]
+            lastfm_fallback_source = self.config.session["lastfm"]["fallback_source"]
         except KeyError:
             self._config_updating_message()
             self.config.update()

@@ -549,16 +544,12 @@ class RipCore(list):
             )
             query_is_clean = banned_words_plain.search(query) is None

-            search_results = self.search(
-                source, query, media_type="track"
-            )
+            search_results = self.search(source, query, media_type="track")
             track = next(search_results)

             if query_is_clean:
                 while banned_words.search(track["title"]) is not None:
-                    logger.debug(
-                        "Track title banned for query=%s", query
-                    )
+                    logger.debug("Track title banned for query=%s", query)
                     track = next(search_results)

             # Because the track is searched as a single we need to set

@@ -568,9 +559,7 @@ class RipCore(list):
             except (NoResultsFound, StopIteration):
                 return None

-        track = try_search(lastfm_source) or try_search(
-            lastfm_fallback_source
-        )
+        track = try_search(lastfm_source) or try_search(lastfm_fallback_source)
         if track is None:
             return False

@@ -594,9 +583,7 @@ class RipCore(list):
             pl.creator = creator_match.group(1)

         tracks_not_found = 0
-        with concurrent.futures.ThreadPoolExecutor(
-            max_workers=15
-        ) as executor:
+        with concurrent.futures.ThreadPoolExecutor(max_workers=15) as executor:
             futures = [
                 executor.submit(search_query, title, artist, pl)
                 for title, artist in queries

@@ -725,9 +712,7 @@ class RipCore(list):
         raise NotImplementedError

         fields = (fname for _, fname, _, _ in Formatter().parse(fmt) if fname)
-        ret = fmt.format(
-            **{k: media.get(k, default="Unknown") for k in fields}
-        )
+        ret = fmt.format(**{k: media.get(k, default="Unknown") for k in fields})
         return ret

     def interactive_search(

@@ -865,9 +850,7 @@ class RipCore(list):
         playlist_title = html.unescape(playlist_title_match.group(1))

         if remaining_tracks > 0:
-            with concurrent.futures.ThreadPoolExecutor(
-                max_workers=15
-            ) as executor:
+            with concurrent.futures.ThreadPoolExecutor(max_workers=15) as executor:
                 last_page = int(remaining_tracks // 50) + int(
                     remaining_tracks % 50 != 0
                 )

@@ -922,9 +905,7 @@ class RipCore(list):
             fg="blue",
         )

-        self.config.file["deezer"]["arl"] = input(
-            style("ARL: ", fg="green")
-        )
+        self.config.file["deezer"]["arl"] = input(style("ARL: ", fg="green"))
         self.config.save()
         secho(
             f'Credentials saved to config file at "{self.config._path}"',
rip/db.py | 12
@@ -71,15 +71,11 @@ class Database:

         with sqlite3.connect(self.path) as conn:
             conditions = " AND ".join(f"{key}=?" for key in items.keys())
-            command = (
-                f"SELECT EXISTS(SELECT 1 FROM {self.name} WHERE {conditions})"
-            )
+            command = f"SELECT EXISTS(SELECT 1 FROM {self.name} WHERE {conditions})"

             logger.debug("Executing %s", command)

-            return bool(
-                conn.execute(command, tuple(items.values())).fetchone()[0]
-            )
+            return bool(conn.execute(command, tuple(items.values())).fetchone()[0])

     def __contains__(self, keys: Union[str, dict]) -> bool:
         """Check whether a key-value pair exists in the database.

@@ -123,9 +119,7 @@ class Database:

         params = ", ".join(self.structure.keys())
         question_marks = ", ".join("?" for _ in items)
-        command = (
-            f"INSERT INTO {self.name} ({params}) VALUES ({question_marks})"
-        )
+        command = f"INSERT INTO {self.name} ({params}) VALUES ({question_marks})"

         logger.debug("Executing %s", command)
         logger.debug("Items to add: %s", items)
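Both db.py hunks re-wrap the same technique: table and column names are interpolated into the SQL string while the values travel separately as `?` placeholders. A self-contained sketch of the pattern, with hypothetical table and column names:

    import sqlite3

    # Hypothetical standalone version of the EXISTS query building shown above
    con = sqlite3.connect(":memory:")
    con.execute("CREATE TABLE downloads (id TEXT)")
    items = {"id": "1234"}
    conditions = " AND ".join(f"{key}=?" for key in items)  # -> "id=?"
    command = f"SELECT EXISTS(SELECT 1 FROM downloads WHERE {conditions})"
    exists = bool(con.execute(command, tuple(items.values())).fetchone()[0])
    print(exists)  # False: nothing has been inserted yet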
@@ -21,12 +21,9 @@ from .constants import (
     DEEZER_BASE,
     DEEZER_DL,
     DEEZER_FORMATS,
-    DEEZER_MAX_Q,
     QOBUZ_BASE,
     QOBUZ_FEATURED_KEYS,
-    SOUNDCLOUD_APP_VERSION,
     SOUNDCLOUD_BASE,
-    SOUNDCLOUD_CLIENT_ID,
     SOUNDCLOUD_USER_ID,
     TIDAL_AUTH_URL,
     TIDAL_BASE,

@@ -240,9 +237,7 @@ class QobuzClient(Client):
         :rtype: dict
         """
         page, status_code = self._api_request(epoint, params)
-        logger.debug(
-            "Keys returned from _gen_pages: %s", ", ".join(page.keys())
-        )
+        logger.debug("Keys returned from _gen_pages: %s", ", ".join(page.keys()))
         key = epoint.split("/")[0] + "s"
         total = page.get(key, {})
         total = total.get("total") or total.get("items")

@@ -265,8 +260,7 @@ class QobuzClient(Client):
         """Check if the secrets are usable."""
         with concurrent.futures.ThreadPoolExecutor() as executor:
             futures = [
-                executor.submit(self._test_secret, secret)
-                for secret in self.secrets
+                executor.submit(self._test_secret, secret) for secret in self.secrets
             ]

             for future in concurrent.futures.as_completed(futures):

@@ -309,15 +303,11 @@ class QobuzClient(Client):

         response, status_code = self._api_request(epoint, params)
         if status_code != 200:
-            raise Exception(
-                f'Error fetching metadata. "{response["message"]}"'
-            )
+            raise Exception(f'Error fetching metadata. "{response["message"]}"')

         return response

-    def _api_search(
-        self, query: str, media_type: str, limit: int = 500
-    ) -> Generator:
+    def _api_search(self, query: str, media_type: str, limit: int = 500) -> Generator:
         """Send a search request to the API.

         :param query:

@@ -369,9 +359,7 @@ class QobuzClient(Client):
         resp, status_code = self._api_request(epoint, params)

         if status_code == 401:
-            raise AuthenticationError(
-                f"Invalid credentials from params {params}"
-            )
+            raise AuthenticationError(f"Invalid credentials from params {params}")
         elif status_code == 400:
             logger.debug(resp)
             raise InvalidAppIdError(f"Invalid app id from params {params}")

@@ -379,9 +367,7 @@ class QobuzClient(Client):
         logger.info("Logged in to Qobuz")

         if not resp["user"]["credential"]["parameters"]:
-            raise IneligibleError(
-                "Free accounts are not eligible to download tracks."
-            )
+            raise IneligibleError("Free accounts are not eligible to download tracks.")

         self.uat = resp["user_auth_token"]
         self.session.headers.update({"X-User-Auth-Token": self.uat})

@@ -430,9 +416,7 @@ class QobuzClient(Client):
         }
         response, status_code = self._api_request("track/getFileUrl", params)
         if status_code == 400:
-            raise InvalidAppSecretError(
-                "Invalid app secret from params %s" % params
-            )
+            raise InvalidAppSecretError("Invalid app secret from params %s" % params)

         return response

@@ -451,9 +435,7 @@ class QobuzClient(Client):
             logger.debug(r.text)
             return r.json(), r.status_code
         except Exception:
-            logger.error(
-                "Problem getting JSON. Status code: %s", r.status_code
-            )
+            logger.error("Problem getting JSON. Status code: %s", r.status_code)
             raise

     def _test_secret(self, secret: str) -> Optional[str]:

@@ -485,9 +467,7 @@ class DeezerClient(Client):
         # no login required
         self.logged_in = False

-    def search(
-        self, query: str, media_type: str = "album", limit: int = 200
-    ) -> dict:
+    def search(self, query: str, media_type: str = "album", limit: int = 200) -> dict:
         """Search API for query.

         :param query:

@@ -501,16 +481,12 @@ class DeezerClient(Client):
         try:
             if media_type == "featured":
                 if query:
-                    search_function = getattr(
-                        self.client.api, f"get_editorial_{query}"
-                    )
+                    search_function = getattr(self.client.api, f"get_editorial_{query}")
                 else:
                     search_function = self.client.api.get_editorial_releases

             else:
-                search_function = getattr(
-                    self.client.api, f"search_{media_type}"
-                )
+                search_function = getattr(self.client.api, f"search_{media_type}")
         except AttributeError:
             raise Exception

@@ -584,9 +560,9 @@ class DeezerClient(Client):
         format_no, format_str = format_info

         dl_info["size_to_quality"] = {
-            int(
-                track_info.get(f"FILESIZE_{format}")
-            ): self._quality_id_from_filetype(format)
+            int(track_info.get(f"FILESIZE_{format}")): self._quality_id_from_filetype(
+                format
+            )
             for format in DEEZER_FORMATS
         }

@@ -627,9 +603,7 @@ class DeezerClient(Client):
         logger.debug("Info bytes: %s", info_bytes)
         path = self._gen_url_path(info_bytes)
         logger.debug(path)
-        return (
-            f"https://e-cdns-proxy-{track_hash[0]}.dzcdn.net/mobile/1/{path}"
-        )
+        return f"https://e-cdns-proxy-{track_hash[0]}.dzcdn.net/mobile/1/{path}"

     def _gen_url_path(self, data):
         return binascii.hexlify(

@@ -659,9 +633,7 @@ class DeezloaderClient(Client):
         # no login required
         self.logged_in = True

-    def search(
-        self, query: str, media_type: str = "album", limit: int = 200
-    ) -> dict:
+    def search(self, query: str, media_type: str = "album", limit: int = 200) -> dict:
         """Search API for query.

         :param query:

@@ -698,9 +670,7 @@ class DeezloaderClient(Client):
         url = f"{DEEZER_BASE}/{media_type}/{meta_id}"
         item = self.session.get(url).json()
         if media_type in ("album", "playlist"):
-            tracks = self.session.get(
-                f"{url}/tracks", params={"limit": 1000}
-            ).json()
+            tracks = self.session.get(f"{url}/tracks", params={"limit": 1000}).json()
             item["tracks"] = tracks["data"]
             item["track_total"] = len(tracks["data"])
         elif media_type == "artist":

@@ -796,9 +766,7 @@ class TidalClient(Client):
         logger.debug(resp)
         return resp

-    def search(
-        self, query: str, media_type: str = "album", limit: int = 100
-    ) -> dict:
+    def search(self, query: str, media_type: str = "album", limit: int = 100) -> dict:
         """Search for a query.

         :param query:

@@ -827,19 +795,13 @@ class TidalClient(Client):
             return self._get_video_stream_url(track_id)

         params = {
-            "audioquality": get_quality(
-                min(quality, TIDAL_MAX_Q), self.source
-            ),
+            "audioquality": get_quality(min(quality, TIDAL_MAX_Q), self.source),
             "playbackmode": "STREAM",
             "assetpresentation": "FULL",
         }
-        resp = self._api_request(
-            f"tracks/{track_id}/playbackinfopostpaywall", params
-        )
+        resp = self._api_request(f"tracks/{track_id}/playbackinfopostpaywall", params)
         try:
-            manifest = json.loads(
-                base64.b64decode(resp["manifest"]).decode("utf-8")
-            )
+            manifest = json.loads(base64.b64decode(resp["manifest"]).decode("utf-8"))
         except KeyError:
             raise Exception(resp["userMessage"])

@@ -1044,9 +1006,7 @@ class TidalClient(Client):
             offset += 100
             tracks_left -= 100
             resp["items"].extend(
-                self._api_request(f"{url}/items", {"offset": offset})[
-                    "items"
-                ]
+                self._api_request(f"{url}/items", {"offset": offset})["items"]
             )

         item["tracks"] = [item["item"] for item in resp["items"]]

@@ -1096,9 +1056,7 @@ class TidalClient(Client):
         resp = self._api_request(
             f"videos/{video_id}/playbackinfopostpaywall", params=params
         )
-        manifest = json.loads(
-            base64.b64decode(resp["manifest"]).decode("utf-8")
-        )
+        manifest = json.loads(base64.b64decode(resp["manifest"]).decode("utf-8"))
         available_urls = self.session.get(manifest["urls"][0])
         available_urls.encoding = "utf-8"
@@ -52,9 +52,7 @@ class Converter:

         self.filename = filename
         self.final_fn = f"{os.path.splitext(filename)[0]}.{self.container}"
-        self.tempfile = os.path.join(
-            gettempdir(), os.path.basename(self.final_fn)
-        )
+        self.tempfile = os.path.join(gettempdir(), os.path.basename(self.final_fn))
         self.remove_source = remove_source
         self.sampling_rate = sampling_rate
         self.bit_depth = bit_depth

@@ -119,13 +117,9 @@ class Converter:
         if self.lossless:
             if isinstance(self.sampling_rate, int):
                 sampling_rates = "|".join(
-                    str(rate)
-                    for rate in SAMPLING_RATES
-                    if rate <= self.sampling_rate
+                    str(rate) for rate in SAMPLING_RATES if rate <= self.sampling_rate
                 )
-                command.extend(
-                    ["-af", f"aformat=sample_rates={sampling_rates}"]
-                )
+                command.extend(["-af", f"aformat=sample_rates={sampling_rates}"])

             elif self.sampling_rate is not None:
                 raise TypeError(

@@ -140,9 +134,7 @@ class Converter:
             else:
                 raise ValueError("Bit depth must be 16, 24, or 32")
         elif self.bit_depth is not None:
-            raise TypeError(
-                f"Bit depth must be int, not {type(self.bit_depth)}"
-            )
+            raise TypeError(f"Bit depth must be int, not {type(self.bit_depth)}")

         # automatically overwrite
         command.extend(["-y", self.tempfile])

@@ -207,9 +199,7 @@ class Vorbis(Converter):
     codec_name = "vorbis"
     codec_lib = "libvorbis"
     container = "ogg"
-    default_ffmpeg_arg = (
-        "-q:a 6"  # 160, aka the "high" quality profile from Spotify
-    )
+    default_ffmpeg_arg = "-q:a 6"  # 160, aka the "high" quality profile from Spotify


 class OPUS(Converter):
@@ -74,9 +74,7 @@ class DownloadStream:
             info = self.request.json()
             try:
                 # Usually happens with deezloader downloads
-                raise NonStreamable(
-                    f"{info['error']} -- {info['message']}"
-                )
+                raise NonStreamable(f"{info['error']} - {info['message']}")
             except KeyError:
                 raise NonStreamable(info)

@@ -88,10 +86,7 @@ class DownloadStream:

         :rtype: Iterator
         """
-        if (
-            self.source == "deezer"
-            and self.is_encrypted.search(self.url) is not None
-        ):
+        if self.source == "deezer" and self.is_encrypted.search(self.url) is not None:
             assert isinstance(self.id, str), self.id

             blowfish_key = self._generate_blowfish_key(self.id)

@@ -99,10 +94,7 @@ class DownloadStream:
             CHUNK_SIZE = 2048 * 3
             return (
                 # (decryptor.decrypt(chunk[:2048]) + chunk[2048:])
-                (
-                    self._decrypt_chunk(blowfish_key, chunk[:2048])
-                    + chunk[2048:]
-                )
+                (self._decrypt_chunk(blowfish_key, chunk[:2048]) + chunk[2048:])
                 if len(chunk) >= 2048
                 else chunk
                 for chunk in self.request.iter_content(CHUNK_SIZE)
@@ -123,9 +115,7 @@ class DownloadStream:
         return self.file_size

     def _create_deezer_decryptor(self, key) -> Blowfish:
-        return Blowfish.new(
-            key, Blowfish.MODE_CBC, b"\x00\x01\x02\x03\x04\x05\x06\x07"
-        )
+        return Blowfish.new(key, Blowfish.MODE_CBC, b"\x00\x01\x02\x03\x04\x05\x06\x07")

     @staticmethod
     def _generate_blowfish_key(track_id: str):
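The three hunks above only re-wrap streamrip's Deezer stream decryption, but the technique is worth spelling out: the stream is read in 6144-byte chunks (`CHUNK_SIZE = 2048 * 3`), and for each chunk of at least 2048 bytes, only the first 2048 bytes are Blowfish-CBC decrypted with the constant IV shown, the remainder passing through untouched. A hedged sketch of that per-chunk rule (using pycryptodome's `Blowfish`, which the `Blowfish.new(...)` call above matches; `blowfish_key` is assumed to be derived elsewhere, e.g. by `_generate_blowfish_key`):

    from Crypto.Cipher import Blowfish  # pycryptodome

    IV = b"\x00\x01\x02\x03\x04\x05\x06\x07"

    def decrypt_chunk(blowfish_key: bytes, chunk: bytes) -> bytes:
        # Chunks shorter than 2048 bytes (e.g. the final one) pass through as-is.
        if len(chunk) < 2048:
            return chunk
        # A fresh CBC cipher per chunk, matching _create_deezer_decryptor above.
        cipher = Blowfish.new(blowfish_key, Blowfish.MODE_CBC, IV)
        return cipher.decrypt(chunk[:2048]) + chunk[2048:]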
@@ -178,9 +168,7 @@ class DownloadPool:
         self.tempdir = tempdir

     async def getfn(self, url):
-        path = os.path.join(
-            self.tempdir, f"__streamrip_partial_{abs(hash(url))}"
-        )
+        path = os.path.join(self.tempdir, f"__streamrip_partial_{abs(hash(url))}")
         self._paths[url] = path
         return path

@@ -195,9 +183,7 @@ class DownloadPool:
     async def _download_url(self, session, url):
         filename = await self.getfn(url)
         logger.debug("Downloading %s", url)
-        async with session.get(url) as response, aiofiles.open(
-            filename, "wb"
-        ) as f:
+        async with session.get(url) as response, aiofiles.open(filename, "wb") as f:
             # without aiofiles  3.6632679780000004s
             # with aiofiles     2.504482839s
             await f.write(await response.content.read())

@@ -215,9 +201,7 @@ class DownloadPool:
     def files(self):
         if len(self._paths) != len(self.urls):
             # Not all of them have downloaded
-            raise Exception(
-                "Must run DownloadPool.download() before accessing files"
-            )
+            raise Exception("Must run DownloadPool.download() before accessing files")

         return [
             os.path.join(self.tempdir, self._paths[self.urls[i]])
@@ -68,9 +68,7 @@ logger = logging.getLogger("streamrip")

 TYPE_REGEXES = {
     "remaster": re.compile(r"(?i)(re)?master(ed)?"),
-    "extra": re.compile(
-        r"(?i)(anniversary|deluxe|live|collector|demo|expanded)"
-    ),
+    "extra": re.compile(r"(?i)(anniversary|deluxe|live|collector|demo|expanded)"),
 }


@@ -270,9 +268,7 @@ class Track(Media):
         except ItemExists as e:
             logger.debug(e)

-        self.path = os.path.join(
-            gettempdir(), f"{hash(self.id)}_{self.quality}.tmp"
-        )
+        self.path = os.path.join(gettempdir(), f"{hash(self.id)}_{self.quality}.tmp")

     def download(  # noqa
         self,

@@ -327,14 +323,9 @@ class Track(Media):
             except KeyError as e:
                 if restrictions := dl_info["restrictions"]:
                     # Turn CamelCase code into a readable sentence
-                    words = re.findall(
-                        r"([A-Z][a-z]+)", restrictions[0]["code"]
-                    )
+                    words = re.findall(r"([A-Z][a-z]+)", restrictions[0]["code"])
                     raise NonStreamable(
-                        words[0]
-                        + " "
-                        + " ".join(map(str.lower, words[1:]))
-                        + "."
+                        words[0] + " " + " ".join(map(str.lower, words[1:])) + "."
                     )

                 secho(f"Panic: {e} dl_info = {dl_info}", fg="red")

@@ -343,9 +334,7 @@ class Track(Media):
             _quick_download(download_url, self.path, desc=self._progress_desc)

         elif isinstance(self.client, DeezloaderClient):
-            _quick_download(
-                dl_info["url"], self.path, desc=self._progress_desc
-            )
+            _quick_download(dl_info["url"], self.path, desc=self._progress_desc)

         elif self.client.source == "deezer":
             # We can only find out if the requested quality is available

@@ -457,13 +446,9 @@ class Track(Media):
             parsed_m3u = m3u8.loads(requests.get(dl_info["url"]).text)
             self.path += ".mp3"

-            with DownloadPool(
-                segment.uri for segment in parsed_m3u.segments
-            ) as pool:
+            with DownloadPool(segment.uri for segment in parsed_m3u.segments) as pool:

-                bar = get_tqdm_bar(
-                    len(pool), desc=self._progress_desc, unit="Chunk"
-                )
+                bar = get_tqdm_bar(len(pool), desc=self._progress_desc, unit="Chunk")

                 def update_tqdm_bar():
                     bar.update(1)

@@ -483,9 +468,7 @@ class Track(Media):
             )

         elif dl_info["type"] == "original":
-            _quick_download(
-                dl_info["url"], self.path, desc=self._progress_desc
-            )
+            _quick_download(dl_info["url"], self.path, desc=self._progress_desc)

             # if a wav is returned, convert to flac
             engine = converter.FLAC(self.path)

@@ -513,9 +496,7 @@ class Track(Media):

     def download_cover(self, width=999999, height=999999):
         """Download the cover art, if cover_url is given."""
-        self.cover_path = os.path.join(
-            gettempdir(), f"cover{hash(self.cover_url)}.jpg"
-        )
+        self.cover_path = os.path.join(gettempdir(), f"cover{hash(self.cover_url)}.jpg")
         logger.debug("Downloading cover from %s", self.cover_url)

         if not os.path.exists(self.cover_path):

@@ -535,9 +516,9 @@ class Track(Media):
         formatter = self.meta.get_formatter(max_quality=self.quality)
         logger.debug("Track meta formatter %s", formatter)
         filename = clean_format(self.file_format, formatter, restrict=restrict)
-        self.final_path = os.path.join(self.folder, filename)[
-            :250
-        ].strip() + ext(self.quality, self.client.source)
+        self.final_path = os.path.join(self.folder, filename)[:250].strip() + ext(
+            self.quality, self.client.source
+        )

         logger.debug("Formatted path: %s", self.final_path)

@@ -550,9 +531,7 @@ class Track(Media):
         return self.final_path

     @classmethod
-    def from_album_meta(
-        cls, album: TrackMetadata, track: dict, client: Client
-    ):
+    def from_album_meta(cls, album: TrackMetadata, track: dict, client: Client):
         """Return a new Track object initialized with info.

         :param album: album metadata returned by API

@@ -562,9 +541,7 @@ class Track(Media):
         :raises: IndexError
         """
         meta = TrackMetadata(album=album, track=track, source=client.source)
-        return cls(
-            client=client, meta=meta, id=track["id"], part_of_tracklist=True
-        )
+        return cls(client=client, meta=meta, id=track["id"], part_of_tracklist=True)

     @classmethod
     def from_api(cls, item: dict, client: Client):

@@ -624,9 +601,7 @@ class Track(Media):
         :param embed_cover: Embed cover art into file
         :type embed_cover: bool
         """
-        assert isinstance(
-            self.meta, TrackMetadata
-        ), "meta must be TrackMetadata"
+        assert isinstance(self.meta, TrackMetadata), "meta must be TrackMetadata"
         if not self.downloaded:
             logger.info(
                 "Track %s not tagged because it was not downloaded",

@@ -750,9 +725,7 @@ class Track(Media):
         self.format_final_path(kwargs.get("restrict_filenames", False))

         if not os.path.isfile(self.path):
-            logger.info(
-                "File %s does not exist. Skipping conversion.", self.path
-            )
+            logger.info("File %s does not exist. Skipping conversion.", self.path)
             secho(f"{self!s} does not exist. Skipping conversion.", fg="red")
             return

@@ -892,12 +865,8 @@ class Video(Media):
         parsed_m3u = m3u8.loads(requests.get(url).text)
         # Asynchronously download the streams

-        with DownloadPool(
-            segment.uri for segment in parsed_m3u.segments
-        ) as pool:
-            bar = get_tqdm_bar(
-                len(pool), desc=self._progress_desc, unit="Chunk"
-            )
+        with DownloadPool(segment.uri for segment in parsed_m3u.segments) as pool:
+            bar = get_tqdm_bar(len(pool), desc=self._progress_desc, unit="Chunk")

             def update_tqdm_bar():
                 bar.update(1)

@@ -906,9 +875,7 @@ class Video(Media):

             # Put the filenames in a tempfile that ffmpeg
             # can read from
-            file_list_path = os.path.join(
-                gettempdir(), "__streamrip_video_files"
-            )
+            file_list_path = os.path.join(gettempdir(), "__streamrip_video_files")
             with open(file_list_path, "w") as file_list:
                 text = "\n".join(f"file '{path}'" for path in pool.files)
                 file_list.write(text)

@@ -1149,9 +1116,7 @@ class Booklet:
         :type parent_folder: str
         :param kwargs:
         """
-        fn = clean_filename(
-            self.description, restrict=kwargs.get("restrict_filenames")
-        )
+        fn = clean_filename(self.description, restrict=kwargs.get("restrict_filenames"))
         filepath = os.path.join(parent_folder, f"{fn}.pdf")

         _quick_download(self.url, filepath, "Booklet")
@@ -1206,8 +1171,7 @@ class Tracklist(list):
             kwargs.get("max_connections", 3)
         ) as executor:
             future_map = {
-                executor.submit(target, item, **kwargs): item
-                for item in self
+                executor.submit(target, item, **kwargs): item for item in self
             }
             try:
                 concurrent.futures.wait(future_map.keys())

@@ -1238,9 +1202,7 @@ class Tracklist(list):
             secho(f"{item!s} exists. Skipping.", fg="yellow")
         except NonStreamable as e:
             e.print(item)
-            failed_downloads.append(
-                (item.client.source, item.type, item.id)
-            )
+            failed_downloads.append((item.client.source, item.type, item.id))

         self.downloaded = True

@@ -1596,9 +1558,7 @@ class Album(Tracklist, Media):
                 and isinstance(item, Track)
                 and kwargs.get("folder_format")
             ):
-                disc_folder = os.path.join(
-                    self.folder, f"Disc {item.meta.discnumber}"
-                )
+                disc_folder = os.path.join(self.folder, f"Disc {item.meta.discnumber}")
                 kwargs["parent_folder"] = disc_folder
             else:
                 kwargs["parent_folder"] = self.folder

@@ -1684,9 +1644,7 @@ class Album(Tracklist, Media):
         logger.debug("Formatter: %s", fmt)
         return fmt

-    def _get_formatted_folder(
-        self, parent_folder: str, restrict: bool = False
-    ) -> str:
+    def _get_formatted_folder(self, parent_folder: str, restrict: bool = False) -> str:
         """Generate the folder name for this album.

         :param parent_folder:

@@ -1818,9 +1776,7 @@ class Playlist(Tracklist, Media):
         if self.client.source == "qobuz":
             self.name = self.meta["name"]
             self.image = self.meta["images"]
-            self.creator = safe_get(
-                self.meta, "owner", "name", default="Qobuz"
-            )
+            self.creator = safe_get(self.meta, "owner", "name", default="Qobuz")

             tracklist = self.meta["tracks"]["items"]

@@ -1830,9 +1786,7 @@ class Playlist(Tracklist, Media):
         elif self.client.source == "tidal":
             self.name = self.meta["title"]
             self.image = tidal_cover_url(self.meta["image"], 640)
-            self.creator = safe_get(
-                self.meta, "creator", "name", default="TIDAL"
-            )
+            self.creator = safe_get(self.meta, "creator", "name", default="TIDAL")

             tracklist = self.meta["tracks"]

@@ -1845,9 +1799,7 @@ class Playlist(Tracklist, Media):
         elif self.client.source == "deezer":
             self.name = self.meta["title"]
             self.image = self.meta["picture_big"]
-            self.creator = safe_get(
-                self.meta, "creator", "name", default="Deezer"
-            )
+            self.creator = safe_get(self.meta, "creator", "name", default="Deezer")

             tracklist = self.meta["tracks"]

@@ -1888,13 +1840,9 @@ class Playlist(Tracklist, Media):

         logger.debug("Loaded %d tracks from playlist %s", len(self), self.name)

-    def _prepare_download(
-        self, parent_folder: str = "StreamripDownloads", **kwargs
-    ):
+    def _prepare_download(self, parent_folder: str = "StreamripDownloads", **kwargs):
         if kwargs.get("folder_format"):
-            fname = clean_filename(
-                self.name, kwargs.get("restrict_filenames", False)
-            )
+            fname = clean_filename(self.name, kwargs.get("restrict_filenames", False))
             self.folder = os.path.join(parent_folder, fname)
         else:
             self.folder = parent_folder

@@ -2091,9 +2039,7 @@ class Artist(Tracklist, Media):
         :rtype: Iterable
         """
         if kwargs.get("folder_format"):
-            folder = clean_filename(
-                self.name, kwargs.get("restrict_filenames", False)
-            )
+            folder = clean_filename(self.name, kwargs.get("restrict_filenames", False))
             self.folder = os.path.join(parent_folder, folder)
         else:
             self.folder = parent_folder

@@ -2110,9 +2056,7 @@ class Artist(Tracklist, Media):
         final = self

         if isinstance(filters, tuple) and self.client.source == "qobuz":
-            filter_funcs = (
-                getattr(self, f"_{filter_}") for filter_ in filters
-            )
+            filter_funcs = (getattr(self, f"_{filter_}") for filter_ in filters)
             for func in filter_funcs:
                 final = filter(func, final)

@@ -2225,10 +2169,7 @@ class Artist(Tracklist, Media):
             best_bd = bit_depth(a["bit_depth"] for a in group)
             best_sr = sampling_rate(a["sampling_rate"] for a in group)
             for album in group:
-                if (
-                    album["bit_depth"] == best_bd
-                    and album["sampling_rate"] == best_sr
-                ):
+                if album["bit_depth"] == best_bd and album["sampling_rate"] == best_sr:
                     yield album
                     break
@ -132,9 +132,7 @@ class TrackMetadata:
|
||||||
self.album = resp.get("title", "Unknown Album")
|
self.album = resp.get("title", "Unknown Album")
|
||||||
self.tracktotal = resp.get("tracks_count", 1)
|
self.tracktotal = resp.get("tracks_count", 1)
|
||||||
self.genre = resp.get("genres_list") or resp.get("genre") or []
|
self.genre = resp.get("genres_list") or resp.get("genre") or []
|
||||||
self.date = resp.get("release_date_original") or resp.get(
|
self.date = resp.get("release_date_original") or resp.get("release_date")
|
||||||
"release_date"
|
|
||||||
)
|
|
||||||
self.copyright = resp.get("copyright")
|
self.copyright = resp.get("copyright")
|
||||||
|
|
||||||
if artists := resp.get("artists"):
|
if artists := resp.get("artists"):
|
||||||
|
@@ -148,9 +146,7 @@ class TrackMetadata:
             self.disctotal = (
                 max(
                     track.get("media_number", 1)
-                    for track in safe_get(
-                        resp, "tracks", "items", default=[{}]
-                    )
+                    for track in safe_get(resp, "tracks", "items", default=[{}])
                 )
                 or 1
             )
@@ -191,22 +187,14 @@ class TrackMetadata:
             self.streamable = resp.get("allowStreaming", False)
             self.id = resp.get("id")

-            if q := resp.get(
-                "audioQuality"
-            ):  # for album entries in single tracks
+            if q := resp.get("audioQuality"):  # for album entries in single tracks
                 self._get_tidal_quality(q)

         elif self.__source == "deezer":
             self.album = resp.get("title", "Unknown Album")
-            self.tracktotal = resp.get("track_total", 0) or resp.get(
-                "nb_tracks", 0
-            )
+            self.tracktotal = resp.get("track_total", 0) or resp.get("nb_tracks", 0)
             self.disctotal = (
-                max(
-                    track.get("disk_number")
-                    for track in resp.get("tracks", [{}])
-                )
-                or 1
+                max(track.get("disk_number") for track in resp.get("tracks", [{}])) or 1
             )
             self.genre = safe_get(resp, "genres", "data")
             self.date = resp.get("release_date")
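One subtlety in the Deezer branch above: max(...) or 1 only works as a default because resp.get("tracks", [{}]) falls back to a single placeholder dict, so max sees exactly one None and returns it without any comparison; the or 1 then supplies the default. A quick check:

    resp = {}  # payload with no "tracks" key at all
    disctotal = (
        max(track.get("disk_number") for track in resp.get("tracks", [{}])) or 1
    )
    # {}.get("disk_number") is None; max of a one-item iterable returns that
    # item unchanged, and None or 1 evaluates to 1.
    assert disctotal == 1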
@@ -365,9 +353,7 @@ class TrackMetadata:

         if isinstance(self._genres, list):
             if self.__source == "qobuz":
-                genres: Iterable = re.findall(
-                    r"([^\u2192\/]+)", "/".join(self._genres)
-                )
+                genres: Iterable = re.findall(r"([^\u2192\/]+)", "/".join(self._genres))
                 genres = set(genres)
             elif self.__source == "deezer":
                 genres = (g["name"] for g in self._genres)
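The collapsed re.findall above splits Qobuz's slash- and arrow-delimited genre paths into individual names (\u2192 is the "→" separator). A self-contained check with illustrative values:

    import re

    _genres = ["Pop/Rock", "Pop/Rock\u2192Rock"]  # hypothetical Qobuz genre strings
    genres = set(re.findall(r"([^\u2192\/]+)", "/".join(_genres)))
    assert genres == {"Pop", "Rock"}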
@@ -401,9 +387,7 @@ class TrackMetadata:
         if hasattr(self, "_copyright"):
             if self._copyright is None:
                 return None
-            copyright: str = re.sub(
-                r"(?i)\(P\)", PHON_COPYRIGHT, self._copyright
-            )
+            copyright: str = re.sub(r"(?i)\(P\)", PHON_COPYRIGHT, self._copyright)
             copyright = re.sub(r"(?i)\(C\)", COPYRIGHT, copyright)
             return copyright

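The copyright property rewrites textual markers to their symbols; assuming PHON_COPYRIGHT and COPYRIGHT are "℗" and "©" (their exact values live in the constants module), the effect is:

    import re

    PHON_COPYRIGHT = "\u2117"  # assumed value of the constant
    COPYRIGHT = "\u00a9"       # assumed value of the constant

    text = "(P) 2021 Label (C) 2021 Label"
    text = re.sub(r"(?i)\(P\)", PHON_COPYRIGHT, text)
    text = re.sub(r"(?i)\(C\)", COPYRIGHT, text)
    assert text == "\u2117 2021 Label \u00a9 2021 Label"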
@@ -463,9 +447,7 @@ class TrackMetadata:
             formatter["sampling_rate"] /= 1000
         return formatter

-    def tags(
-        self, container: str = "flac", exclude: Optional[set] = None
-    ) -> Generator:
+    def tags(self, container: str = "flac", exclude: Optional[set] = None) -> Generator:
         """Create a generator of key, value pairs for use with mutagen.

         The *_KEY dicts are organized in the format:
@@ -623,9 +605,7 @@ class TrackMetadata:

         :rtype: int
         """
-        return sum(
-            hash(v) for v in self.asdict().values() if isinstance(v, Hashable)
-        )
+        return sum(hash(v) for v in self.asdict().values() if isinstance(v, Hashable))

     def __repr__(self) -> str:
         """Return the string representation of the metadata object.
@@ -84,9 +84,7 @@ __QUALITY_MAP: Dict[str, Dict[int, Union[int, str, Tuple[int, str]]]] = {
 }


-def get_quality(
-    quality_id: int, source: str
-) -> Union[str, int, Tuple[int, str]]:
+def get_quality(quality_id: int, source: str) -> Union[str, int, Tuple[int, str]]:
     """Get the source-specific quality id.

     :param quality_id: the universal quality id (0, 1, 2, 4)
@@ -156,9 +154,7 @@ def clean_format(formatter: str, format_info, restrict: bool = False):
     clean_dict = dict()
     for key in fmt_keys:
         if isinstance(format_info.get(key), (str, float)):
-            clean_dict[key] = clean_filename(
-                str(format_info[key]), restrict=restrict
-            )
+            clean_dict[key] = clean_filename(str(format_info[key]), restrict=restrict)
         elif isinstance(format_info.get(key), int):  # track/discnumber
             clean_dict[key] = f"{format_info[key]:02}"
         else:
@@ -176,9 +172,7 @@ def tidal_cover_url(uuid, size):
     possibles = (80, 160, 320, 640, 1280)
     assert size in possibles, f"size must be in {possibles}"

-    return TIDAL_COVER_URL.format(
-        uuid=uuid.replace("-", "/"), height=size, width=size
-    )
+    return TIDAL_COVER_URL.format(uuid=uuid.replace("-", "/"), height=size, width=size)


 def init_log(path: Optional[str] = None, level: str = "DEBUG"):
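tidal_cover_url turns a dashed image UUID into the path segments of a CDN URL. A sketch assuming a template of the usual resources.tidal.com shape (the real constant may differ):

    TIDAL_COVER_URL = (
        "https://resources.tidal.com/images/{uuid}/{width}x{height}.jpg"  # assumed
    )

    uuid = "aaaa-bbbb-cccc"  # hypothetical cover UUID
    url = TIDAL_COVER_URL.format(uuid=uuid.replace("-", "/"), height=640, width=640)
    assert url == "https://resources.tidal.com/images/aaaa/bbbb/cccc/640x640.jpg"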
@@ -280,9 +274,7 @@ def gen_threadsafe_session(
         headers = {}

     session = requests.Session()
-    adapter = requests.adapters.HTTPAdapter(
-        pool_connections=100, pool_maxsize=100
-    )
+    adapter = requests.adapters.HTTPAdapter(pool_connections=100, pool_maxsize=100)
     session.mount("https://", adapter)
     session.headers.update(headers)
     return session
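The enlarged connection pool matters here because one Session is shared across download threads; requests' HTTPAdapter defaults to a pool size of 10, so heavier concurrency would otherwise block or drop connections. A usage sketch (the URLs are placeholders):

    import requests
    from concurrent.futures import ThreadPoolExecutor

    session = requests.Session()
    adapter = requests.adapters.HTTPAdapter(pool_connections=100, pool_maxsize=100)
    session.mount("https://", adapter)

    urls = [f"https://httpbin.org/get?i={i}" for i in range(20)]  # placeholder URLs
    with ThreadPoolExecutor(max_workers=20) as ex:
        codes = list(ex.map(lambda u: session.get(u).status_code, urls))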
@@ -350,9 +342,7 @@ def get_cover_urls(resp: dict, source: str) -> dict:
             "picture_xl",
         )
         cover_urls = {
-            sk: resp.get(
-                rk, resp.get(rkf)
-            )  # size key, resp key, resp key fallback
+            sk: resp.get(rk, resp.get(rkf))  # size key, resp key, resp key fallback
             for sk, rk, rkf in zip(
                 COVER_SIZES,
                 resp_keys,
|
@ -367,9 +357,9 @@ def get_cover_urls(resp: dict, source: str) -> dict:
|
||||||
return cover_urls
|
return cover_urls
|
||||||
|
|
||||||
if source == "soundcloud":
|
if source == "soundcloud":
|
||||||
cover_url = (
|
cover_url = (resp["artwork_url"] or resp["user"].get("avatar_url")).replace(
|
||||||
resp["artwork_url"] or resp["user"].get("avatar_url")
|
"large", "t500x500"
|
||||||
).replace("large", "t500x500")
|
)
|
||||||
|
|
||||||
cover_urls = {"large": cover_url}
|
cover_urls = {"large": cover_url}
|
||||||
|
|
||||||
|
|
|
@@ -8,10 +8,7 @@ from streamrip.downloadtools import DownloadPool
 def test_downloadpool(tmpdir):
     start = time.perf_counter()
     with DownloadPool(
-        (
-            f"https://pokeapi.co/api/v2/pokemon/{number}"
-            for number in range(1, 151)
-        ),
+        (f"https://pokeapi.co/api/v2/pokemon/{number}" for number in range(1, 151)),
         tempdir=tmpdir,
     ) as pool:
         pool.download()
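The test above doubles as the minimal usage pattern for DownloadPool: pass an iterable of URLs plus a tempdir, then call download() inside the context manager. Trimmed to its essentials (endpoint count reduced; only the API surface the test exercises is shown):

    from streamrip.downloadtools import DownloadPool

    urls = (f"https://pokeapi.co/api/v2/pokemon/{n}" for n in range(1, 11))
    with DownloadPool(urls, tempdir="/tmp/streamrip-test") as pool:
        pool.download()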