mirror of https://github.com/nathom/streamrip.git
synced 2025-05-15 23:54:48 -04:00

Change Black max line length to 79

parent e941b89153
commit f8df594031

11 changed files with 324 additions and 107 deletions
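Editor's note (a sketch, not part of the commit): the hunks below are the output of re-running Black with a 79-character line limit instead of its default 88. The limit is usually applied either on the command line (black --line-length 79 .) or via line-length = 79 under [tool.black] in pyproject.toml; the configuration change itself is not shown in this diff, so treat those locations as assumptions. A minimal Python sketch of the effect, using Black's public format_str API on a call similar to the click.secho lines changed in the Track hunks below:

    import black

    # A top-level call a few characters over 79 (similar to ones below,
    # minus their indentation).
    src = (
        'click.secho(f"Track {self!s} is not available for download, '
        'skipping.", fg="red")\n'
    )

    # At the old 88-character default this line is left untouched;
    # at 79 characters Black wraps the argument list, which is the kind
    # of change repeated throughout this diff.
    print(black.format_str(src, mode=black.FileMode(line_length=88)))
    print(black.format_str(src, mode=black.FileMode(line_length=79)))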
@@ -45,7 +45,9 @@ logger = logging.getLogger("streamrip")

 TYPE_REGEXES = {
     "remaster": re.compile(r"(?i)(re)?master(ed)?"),
-    "extra": re.compile(r"(?i)(anniversary|deluxe|live|collector|demo|expanded)"),
+    "extra": re.compile(
+        r"(?i)(anniversary|deluxe|live|collector|demo|expanded)"
+    ),
 }


@@ -118,12 +120,15 @@ class Track:
         if self.client.source == "qobuz":
             self.cover_url = self.resp["album"]["image"]["large"]
         elif self.client.source == "tidal":
-            self.cover_url = tidal_cover_url(self.resp["album"]["cover"], 320)
+            self.cover_url = tidal_cover_url(
+                self.resp["album"]["cover"], 320
+            )
         elif self.client.source == "deezer":
             self.cover_url = self.resp["album"]["cover_medium"]
         elif self.client.source == "soundcloud":
             self.cover_url = (
-                self.resp["artwork_url"] or self.resp["user"].get("avatar_url")
+                self.resp["artwork_url"]
+                or self.resp["user"].get("avatar_url")
             ).replace("large", "t500x500")
         else:
             raise InvalidSourceError(self.client.source)
@@ -170,7 +175,9 @@ class Track:
             return False

         self.download_cover()  # only downloads for playlists and singles
-        self.path = os.path.join(gettempdir(), f"{hash(self.id)}_{self.quality}.tmp")
+        self.path = os.path.join(
+            gettempdir(), f"{hash(self.id)}_{self.quality}.tmp"
+        )
         return True

     def download(
@@ -228,7 +235,8 @@ class Track:
             )  # downloads file
         except NonStreamable:
             click.secho(
-                f"Track {self!s} is not available for download, skipping.", fg="red"
+                f"Track {self!s} is not available for download, skipping.",
+                fg="red",
             )
             return False

@@ -273,7 +281,11 @@ class Track:
         :rtype: bool
         """
         return all(
-            (info.get("sampling_rate"), info.get("bit_depth"), not info.get("sample"))
+            (
+                info.get("sampling_rate"),
+                info.get("bit_depth"),
+                not info.get("sample"),
+            )
         )

     def move(self, path: str):
@@ -340,13 +352,17 @@ class Track:
         if not hasattr(self, "cover_url"):
             return False

-        self.cover_path = os.path.join(gettempdir(), f"cover{hash(self.cover_url)}.jpg")
+        self.cover_path = os.path.join(
+            gettempdir(), f"cover{hash(self.cover_url)}.jpg"
+        )
         logger.debug(f"Downloading cover from {self.cover_url}")
         # click.secho(f"\nDownloading cover art for {self!s}", fg="blue")

         if not os.path.exists(self.cover_path):
             tqdm_download(
-                self.cover_url, self.cover_path, desc=click.style("Cover", fg="cyan")
+                self.cover_url,
+                self.cover_path,
+                desc=click.style("Cover", fg="cyan"),
             )
         else:
             logger.debug("Cover already exists, skipping download")
@@ -361,16 +377,18 @@ class Track:
         formatter = self.meta.get_formatter()
         logger.debug("Track meta formatter %s", formatter)
         filename = clean_format(self.file_format, formatter)
-        self.final_path = os.path.join(self.folder, filename)[:250].strip() + ext(
-            self.quality, self.client.source
-        )
+        self.final_path = os.path.join(self.folder, filename)[
+            :250
+        ].strip() + ext(self.quality, self.client.source)

         logger.debug("Formatted path: %s", self.final_path)

         return self.final_path

     @classmethod
-    def from_album_meta(cls, album: TrackMetadata, track: dict, client: Client):
+    def from_album_meta(
+        cls, album: TrackMetadata, track: dict, client: Client
+    ):
         """Return a new Track object initialized with info.

         :param album: album metadata returned by API
@@ -438,16 +456,20 @@ class Track:
         :param embed_cover: Embed cover art into file
         :type embed_cover: bool
         """
-        assert isinstance(self.meta, TrackMetadata), "meta must be TrackMetadata"
+        assert isinstance(
+            self.meta, TrackMetadata
+        ), "meta must be TrackMetadata"
         if not self.downloaded:
             logger.info(
-                "Track %s not tagged because it was not downloaded", self["title"]
+                "Track %s not tagged because it was not downloaded",
+                self["title"],
             )
             return

         if self.tagged:
             logger.info(
-                "Track %s not tagged because it is already tagged", self["title"]
+                "Track %s not tagged because it is already tagged",
+                self["title"],
             )
             return

@@ -532,7 +554,9 @@ class Track:
         """
         if not self.downloaded:
             logger.debug("Track not downloaded, skipping conversion")
-            click.secho("Track not downloaded, skipping conversion", fg="magenta")
+            click.secho(
+                "Track not downloaded, skipping conversion", fg="magenta"
+            )
             return

         CONV_CLASS = {
@@ -551,8 +575,12 @@ class Track:
         self.format_final_path()

         if not os.path.isfile(self.path):
-            logger.info("File %s does not exist. Skipping conversion.", self.path)
-            click.secho(f"{self!s} does not exist. Skipping conversion.", fg="red")
+            logger.info(
+                "File %s does not exist. Skipping conversion.", self.path
+            )
+            click.secho(
+                f"{self!s} does not exist. Skipping conversion.", fg="red"
+            )
             return

         assert (
@@ -671,13 +699,23 @@ class Video:
         :param kwargs:
         """
         click.secho(
-            f"Downloading {self.title} (Video). This may take a while.", fg="blue"
+            f"Downloading {self.title} (Video). This may take a while.",
+            fg="blue",
         )

         self.parent_folder = kwargs.get("parent_folder", "StreamripDownloads")
         url = self.client.get_file_url(self.id, video=True)
         # it's more convenient to have ffmpeg download the hls
-        command = ["ffmpeg", "-i", url, "-c", "copy", "-loglevel", "panic", self.path]
+        command = [
+            "ffmpeg",
+            "-i",
+            url,
+            "-c",
+            "copy",
+            "-loglevel",
+            "panic",
+            self.path,
+        ]
         p = subprocess.Popen(command)
         p.wait()  # remove this?

@@ -809,7 +847,9 @@ class Tracklist(list):
         # Tidal errors out with unlimited concurrency
         # max_workers = 15 if self.client.source == "tidal" else 90
         with concurrent.futures.ThreadPoolExecutor(15) as executor:
-            futures = [executor.submit(target, item, **kwargs) for item in self]
+            futures = [
+                executor.submit(target, item, **kwargs) for item in self
+            ]
             try:
                 concurrent.futures.wait(futures)
             except (KeyboardInterrupt, SystemExit):
@@ -1079,7 +1119,8 @@ class YoutubeVideo:
             "-q",
             "-o",
             os.path.join(
-                youtube_video_downloads_folder, "%(title)s.%(container)s"
+                youtube_video_downloads_folder,
+                "%(title)s.%(container)s",
             ),
             self.url,
         ]
@@ -24,9 +24,14 @@ if not os.path.isdir(CACHE_DIR):


 @click.group(invoke_without_command=True)
-@click.option("-c", "--convert", metavar="CODEC", help="alac, mp3, flac, or ogg")
 @click.option(
-    "-u", "--urls", metavar="URLS", help="Url from Qobuz, Tidal, SoundCloud, or Deezer"
+    "-c", "--convert", metavar="CODEC", help="alac, mp3, flac, or ogg"
+)
+@click.option(
+    "-u",
+    "--urls",
+    metavar="URLS",
+    help="Url from Qobuz, Tidal, SoundCloud, or Deezer",
 )
 @click.option(
     "-q",
@@ -70,7 +75,9 @@ def cli(ctx, **kwargs):

     r = requests.get("https://pypi.org/pypi/streamrip/json").json()
     newest = r["info"]["version"]
-    if version.parse(metadata.version("streamrip")) < version.parse(newest):
+    if version.parse(metadata.version("streamrip")) < version.parse(
+        newest
+    ):
         click.secho(
             "A new version of streamrip is available! "
             "Run `pip3 install streamrip --upgrade` to update.",
@@ -138,9 +145,14 @@ def filter_discography(ctx, **kwargs):


 @cli.command()
-@click.option("-t", "--type", default="album", help="album, playlist, track, or artist")
 @click.option(
-    "-s", "--source", default="qobuz", help="qobuz, tidal, soundcloud, or deezer"
+    "-t", "--type", default="album", help="album, playlist, track, or artist"
+)
+@click.option(
+    "-s",
+    "--source",
+    default="qobuz",
+    help="qobuz, tidal, soundcloud, or deezer",
 )
 @click.argument("QUERY", nargs=-1)
 @click.pass_context
@@ -228,7 +240,9 @@ def discover(ctx, **kwargs):

 @cli.command()
 @click.option(
-    "-s", "--source", help="Qobuz, Tidal, Deezer, or SoundCloud. Default: Qobuz."
+    "-s",
+    "--source",
+    help="Qobuz, Tidal, Deezer, or SoundCloud. Default: Qobuz.",
 )
 @click.argument("URL")
 @click.pass_context
@@ -254,7 +268,9 @@ def lastfm(ctx, source, url):

 @cli.command()
 @click.option("-o", "--open", is_flag=True, help="Open the config file")
-@click.option("-d", "--directory", is_flag=True, help="Open the config directory")
+@click.option(
+    "-d", "--directory", is_flag=True, help="Open the config directory"
+)
 @click.option("-q", "--qobuz", is_flag=True, help="Set Qobuz credentials")
 @click.option("-t", "--tidal", is_flag=True, help="Re-login into Tidal")
 @click.option("--reset", is_flag=True, help="RESET the config file")
@@ -274,7 +290,9 @@ def config(ctx, **kwargs):
         click.launch(config_dir)

     if kwargs["qobuz"]:
-        config.file["qobuz"]["email"] = input(click.style("Qobuz email: ", fg="blue"))
+        config.file["qobuz"]["email"] = input(
+            click.style("Qobuz email: ", fg="blue")
+        )

         click.secho("Qobuz password (will not show on screen):", fg="blue")
         config.file["qobuz"]["password"] = md5(
@@ -282,7 +300,9 @@ def config(ctx, **kwargs):
         ).hexdigest()

         config.save()
-        click.secho("Qobuz credentials hashed and saved to config.", fg="green")
+        click.secho(
+            "Qobuz credentials hashed and saved to config.", fg="green"
+        )

     if kwargs["tidal"]:
         client = TidalClient()
@@ -216,7 +216,9 @@ class QobuzClient(Client):
         :rtype: dict
         """
         page, status_code = self._api_request(epoint, params)
-        logger.debug("Keys returned from _gen_pages: %s", ", ".join(page.keys()))
+        logger.debug(
+            "Keys returned from _gen_pages: %s", ", ".join(page.keys())
+        )
         key = epoint.split("/")[0] + "s"
         total = page.get(key, {})
         total = total.get("total") or total.get("items")
@@ -240,7 +242,9 @@ class QobuzClient(Client):
         for secret in self.secrets:
             if self._test_secret(secret):
                 self.sec = secret
-                logger.debug("Working secret and app_id: %s - %s", secret, self.app_id)
+                logger.debug(
+                    "Working secret and app_id: %s - %s", secret, self.app_id
+                )
                 break
         if not hasattr(self, "sec"):
             raise InvalidAppSecretError(f"Invalid secrets: {self.secrets}")
@@ -274,11 +278,15 @@ class QobuzClient(Client):

         response, status_code = self._api_request(epoint, params)
         if status_code != 200:
-            raise Exception(f'Error fetching metadata. "{response["message"]}"')
+            raise Exception(
+                f'Error fetching metadata. "{response["message"]}"'
+            )

         return response

-    def _api_search(self, query: str, media_type: str, limit: int = 500) -> Generator:
+    def _api_search(
+        self, query: str, media_type: str, limit: int = 500
+    ) -> Generator:
         """Send a search request to the API.

         :param query:
@@ -330,14 +338,18 @@ class QobuzClient(Client):
         resp, status_code = self._api_request(epoint, params)

         if status_code == 401:
-            raise AuthenticationError(f"Invalid credentials from params {params}")
+            raise AuthenticationError(
+                f"Invalid credentials from params {params}"
+            )
         elif status_code == 400:
             raise InvalidAppIdError(f"Invalid app id from params {params}")
         else:
             logger.info("Logged in to Qobuz")

         if not resp["user"]["credential"]["parameters"]:
-            raise IneligibleError("Free accounts are not eligible to download tracks.")
+            raise IneligibleError(
+                "Free accounts are not eligible to download tracks."
+            )

         self.uat = resp["user_auth_token"]
         self.session.headers.update({"X-User-Auth-Token": self.uat})
@@ -386,7 +398,9 @@ class QobuzClient(Client):
         }
         response, status_code = self._api_request("track/getFileUrl", params)
         if status_code == 400:
-            raise InvalidAppSecretError("Invalid app secret from params %s" % params)
+            raise InvalidAppSecretError(
+                "Invalid app secret from params %s" % params
+            )

         return response

@@ -404,7 +418,9 @@ class QobuzClient(Client):
         try:
             return r.json(), r.status_code
         except Exception:
-            logger.error("Problem getting JSON. Status code: %s", r.status_code)
+            logger.error(
+                "Problem getting JSON. Status code: %s", r.status_code
+            )
             raise

     def _test_secret(self, secret: str) -> bool:
@@ -435,7 +451,9 @@ class DeezerClient(Client):
         # no login required
         self.logged_in = True

-    def search(self, query: str, media_type: str = "album", limit: int = 200) -> dict:
+    def search(
+        self, query: str, media_type: str = "album", limit: int = 200
+    ) -> dict:
         """Search API for query.

         :param query:
@@ -472,7 +490,9 @@ class DeezerClient(Client):
         url = f"{DEEZER_BASE}/{media_type}/{meta_id}"
         item = self.session.get(url).json()
         if media_type in ("album", "playlist"):
-            tracks = self.session.get(f"{url}/tracks", params={"limit": 1000}).json()
+            tracks = self.session.get(
+                f"{url}/tracks", params={"limit": 1000}
+            ).json()
             item["tracks"] = tracks["data"]
             item["track_total"] = len(tracks["data"])
         elif media_type == "artist":
@@ -568,7 +588,9 @@ class TidalClient(Client):
         logger.debug(resp)
         return resp

-    def search(self, query: str, media_type: str = "album", limit: int = 100) -> dict:
+    def search(
+        self, query: str, media_type: str = "album", limit: int = 100
+    ) -> dict:
         """Search for a query.

         :param query:
@@ -597,13 +619,19 @@ class TidalClient(Client):
             return self._get_video_stream_url(track_id)

         params = {
-            "audioquality": get_quality(min(quality, TIDAL_MAX_Q), self.source),
+            "audioquality": get_quality(
+                min(quality, TIDAL_MAX_Q), self.source
+            ),
             "playbackmode": "STREAM",
             "assetpresentation": "FULL",
         }
-        resp = self._api_request(f"tracks/{track_id}/playbackinfopostpaywall", params)
+        resp = self._api_request(
+            f"tracks/{track_id}/playbackinfopostpaywall", params
+        )
         try:
-            manifest = json.loads(base64.b64decode(resp["manifest"]).decode("utf-8"))
+            manifest = json.loads(
+                base64.b64decode(resp["manifest"]).decode("utf-8")
+            )
         except KeyError:
             raise Exception(resp["userMessage"])

@@ -641,7 +669,8 @@ class TidalClient(Client):
         login_link = f"https://{self._get_device_code()}"

         click.secho(
-            f"Go to {login_link} to log into Tidal within 5 minutes.", fg="blue"
+            f"Go to {login_link} to log into Tidal within 5 minutes.",
+            fg="blue",
         )
         if launch:
             click.launch(login_link)
@@ -808,7 +837,9 @@ class TidalClient(Client):
             offset += 100
             tracks_left -= 100
             resp["items"].extend(
-                self._api_request(f"{url}/items", {"offset": offset})["items"]
+                self._api_request(f"{url}/items", {"offset": offset})[
+                    "items"
+                ]
             )

         item["tracks"] = [item["item"] for item in resp["items"]]
@@ -853,7 +884,9 @@ class TidalClient(Client):
             r'#EXT-X-STREAM-INF:BANDWIDTH=\d+,AVERAGE-BANDWIDTH=\d+,CODECS="[^"]+"'
             r",RESOLUTION=\d+x\d+\n(.+)"
         )
-        manifest = json.loads(base64.b64decode(resp["manifest"]).decode("utf-8"))
+        manifest = json.loads(
+            base64.b64decode(resp["manifest"]).decode("utf-8")
+        )
         available_urls = self.session.get(manifest["urls"][0])
         url_info = re.findall(stream_url_regex, available_urls.text)

@@ -892,7 +925,10 @@ class SoundCloudClient(Client):
         :param id:
         :param media_type:
         """
-        assert media_type in ("track", "playlist"), f"{media_type} not supported"
+        assert media_type in (
+            "track",
+            "playlist",
+        ), f"{media_type} not supported"

         if "http" in str(id):
             resp, _ = self._get(f"resolve?url={id}")
@@ -929,7 +965,10 @@ class SoundCloudClient(Client):
         url = None
         for tc in track["media"]["transcodings"]:
             fmt = tc["format"]
-            if fmt["protocol"] == "hls" and fmt["mime_type"] == "audio/mpeg":
+            if (
+                fmt["protocol"] == "hls"
+                and fmt["mime_type"] == "audio/mpeg"
+            ):
                 url = tc["url"]
                 break

@@ -331,7 +331,11 @@ class ConfigDocumentation:
         :type path: str
         """
         with open(path, "r") as f:
-            lines = [line for line in f.readlines() if not line.strip().startswith("#")]
+            lines = [
+                line
+                for line in f.readlines()
+                if not line.strip().startswith("#")
+            ]

         with open(path, "w") as f:
             f.write("".join(lines))
@@ -19,7 +19,9 @@ DOWNLOADS_DIR = os.path.join(HOME, "StreamripDownloads")

 AGENT = "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:83.0) Gecko/20100101 Firefox/83.0"

-TIDAL_COVER_URL = "https://resources.tidal.com/images/{uuid}/{width}x{height}.jpg"
+TIDAL_COVER_URL = (
+    "https://resources.tidal.com/images/{uuid}/{width}x{height}.jpg"
+)


 QUALITY_DESC = {
@@ -142,9 +144,7 @@ ALBUM_KEYS = (
     "composer",
 )
 # TODO: rename these to DEFAULT_FOLDER_FORMAT etc
-FOLDER_FORMAT = (
-    "{albumartist} - {title} ({year}) [{container}] [{bit_depth}B-{sampling_rate}kHz]"
-)
+FOLDER_FORMAT = "{albumartist} - {title} ({year}) [{container}] [{bit_depth}B-{sampling_rate}kHz]"
 TRACK_FORMAT = "{tracknumber}. {artist} - {title}"


@@ -52,7 +52,9 @@ class Converter:

         self.filename = filename
         self.final_fn = f"{os.path.splitext(filename)[0]}.{self.container}"
-        self.tempfile = os.path.join(gettempdir(), os.path.basename(self.final_fn))
+        self.tempfile = os.path.join(
+            gettempdir(), os.path.basename(self.final_fn)
+        )
         self.remove_source = remove_source
         self.sampling_rate = sampling_rate
         self.bit_depth = bit_depth
@@ -115,9 +117,13 @@ class Converter:
         if self.lossless:
             if isinstance(self.sampling_rate, int):
                 sampling_rates = "|".join(
-                    str(rate) for rate in SAMPLING_RATES if rate <= self.sampling_rate
+                    str(rate)
+                    for rate in SAMPLING_RATES
+                    if rate <= self.sampling_rate
                 )
-                command.extend(["-af", f"aformat=sample_rates={sampling_rates}"])
+                command.extend(
+                    ["-af", f"aformat=sample_rates={sampling_rates}"]
+                )

             elif self.sampling_rate is not None:
                 raise TypeError(
@@ -132,7 +138,9 @@ class Converter:
             else:
                 raise ValueError("Bit depth must be 16, 24, or 32")
         elif self.bit_depth is not None:
-            raise TypeError(f"Bit depth must be int, not {type(self.bit_depth)}")
+            raise TypeError(
+                f"Bit depth must be int, not {type(self.bit_depth)}"
+            )

         # automatically overwrite
         command.extend(["-y", self.tempfile])
@@ -195,7 +203,9 @@ class Vorbis(Converter):
     codec_name = "vorbis"
     codec_lib = "libvorbis"
     container = "ogg"
-    default_ffmpeg_arg = "-q:a 6"  # 160, aka the "high" quality profile from Spotify
+    default_ffmpeg_arg = (
+        "-q:a 6"  # 160, aka the "high" quality profile from Spotify
+    )


 class OPUS(Converter):
@@ -48,7 +48,12 @@ logger = logging.getLogger("streamrip")


 Media = Union[
-    Type[Album], Type[Playlist], Type[Artist], Type[Track], Type[Label], Type[Video]
+    Type[Album],
+    Type[Playlist],
+    Type[Artist],
+    Type[Track],
+    Type[Label],
+    Type[Video],
 ]
 MEDIA_CLASS: Dict[str, Media] = {
     "album": Album,
@@ -175,18 +180,24 @@ class MusicDL(list):
             "track_format": self.config.session["path_format"]["track"],
             "embed_cover": self.config.session["artwork"]["embed"],
             "embed_cover_size": self.config.session["artwork"]["size"],
-            "keep_hires_cover": self.config.session["artwork"]["keep_hires_cover"],
+            "keep_hires_cover": self.config.session["artwork"][
+                "keep_hires_cover"
+            ],
             "set_playlist_to_album": self.config.session["metadata"][
                 "set_playlist_to_album"
             ],
             "stay_temp": self.config.session["conversion"]["enabled"],
             "conversion": self.config.session["conversion"],
-            "concurrent_downloads": self.config.session["concurrent_downloads"],
+            "concurrent_downloads": self.config.session[
+                "concurrent_downloads"
+            ],
             "new_tracknumbers": self.config.session["metadata"][
                 "new_playlist_tracknumbers"
             ],
             "download_videos": self.config.session["tidal"]["download_videos"],
-            "download_booklets": self.config.session["qobuz"]["download_booklets"],
+            "download_booklets": self.config.session["qobuz"][
+                "download_booklets"
+            ],
             "download_youtube_videos": self.config.session["youtube"][
                 "download_videos"
             ],
@@ -209,7 +220,9 @@ class MusicDL(list):

         logger.debug("Arguments from config: %s", arguments)

-        source_subdirs = self.config.session["downloads"]["source_subdirectories"]
+        source_subdirs = self.config.session["downloads"][
+            "source_subdirectories"
+        ]
         for item in self:
             if source_subdirs:
                 arguments["parent_folder"] = self.__get_source_subdir(
@@ -220,20 +233,26 @@ class MusicDL(list):
                 item.download(**arguments)
                 continue

-            arguments["quality"] = self.config.session[item.client.source]["quality"]
+            arguments["quality"] = self.config.session[item.client.source][
+                "quality"
+            ]
             if isinstance(item, Artist):
                 filters_ = tuple(
                     k for k, v in self.config.session["filters"].items() if v
                 )
                 arguments["filters"] = filters_
-                logger.debug("Added filter argument for artist/label: %s", filters_)
+                logger.debug(
+                    "Added filter argument for artist/label: %s", filters_
+                )

             if not (isinstance(item, Tracklist) and item.loaded):
                 logger.debug("Loading metadata")
                 try:
                     item.load_meta()
                 except NonStreamable:
-                    click.secho(f"{item!s} is not available, skipping.", fg="red")
+                    click.secho(
+                        f"{item!s} is not available, skipping.", fg="red"
+                    )
                     continue

             item.download(**arguments)
@@ -317,7 +336,9 @@ class MusicDL(list):

         parsed.extend(self.url_parse.findall(url))  # Qobuz, Tidal, Dezer
         soundcloud_urls = self.soundcloud_url_parse.findall(url)
-        soundcloud_items = [self.clients["soundcloud"].get(u) for u in soundcloud_urls]
+        soundcloud_items = [
+            self.clients["soundcloud"].get(u) for u in soundcloud_urls
+        ]

         parsed.extend(
             ("soundcloud", item["kind"], url)
@@ -349,11 +370,15 @@ class MusicDL(list):

         # For testing:
         # https://www.last.fm/user/nathan3895/playlists/12058911
-        user_regex = re.compile(r"https://www\.last\.fm/user/([^/]+)/playlists/\d+")
+        user_regex = re.compile(
+            r"https://www\.last\.fm/user/([^/]+)/playlists/\d+"
+        )
         lastfm_urls = self.lastfm_url_parse.findall(urls)
         try:
             lastfm_source = self.config.session["lastfm"]["source"]
-            lastfm_fallback_source = self.config.session["lastfm"]["fallback_source"]
+            lastfm_fallback_source = self.config.session["lastfm"][
+                "fallback_source"
+            ]
         except KeyError:
             self._config_updating_message()
             self.config.update()
@@ -383,7 +408,9 @@ class MusicDL(list):
                 except (NoResultsFound, StopIteration):
                     return None

-            track = try_search(lastfm_source) or try_search(lastfm_fallback_source)
+            track = try_search(lastfm_source) or try_search(
+                lastfm_fallback_source
+            )
             if track is None:
                 return False

@@ -405,7 +432,9 @@ class MusicDL(list):
             pl.creator = creator_match.group(1)

         tracks_not_found = 0
-        with concurrent.futures.ThreadPoolExecutor(max_workers=15) as executor:
+        with concurrent.futures.ThreadPoolExecutor(
+            max_workers=15
+        ) as executor:
             futures = [
                 executor.submit(search_query, title, artist, pl)
                 for title, artist in queries
@@ -422,7 +451,9 @@ class MusicDL(list):
         pl.loaded = True

         if tracks_not_found > 0:
-            click.secho(f"{tracks_not_found} tracks not found.", fg="yellow")
+            click.secho(
+                f"{tracks_not_found} tracks not found.", fg="yellow"
+            )
         self.append(pl)

     def handle_txt(self, filepath: Union[str, os.PathLike]):
@@ -438,7 +469,11 @@ class MusicDL(list):
             self.handle_urls(txt.read())

     def search(
-        self, source: str, query: str, media_type: str = "album", limit: int = 200
+        self,
+        source: str,
+        query: str,
+        media_type: str = "album",
+        limit: int = 200,
     ) -> Generator:
         """Universal search.

@@ -473,7 +508,9 @@ class MusicDL(list):
         else:
             logger.debug("Not generator")
             items = (
-                results.get("data") or results.get("items") or results.get("collection")
+                results.get("data")
+                or results.get("items")
+                or results.get("collection")
             )
             if items is None:
                 raise NoResultsFound(query)
@@ -513,7 +550,9 @@ class MusicDL(list):
             raise NotImplementedError

         fields = (fname for _, fname, _, _ in Formatter().parse(fmt) if fname)
-        ret = fmt.format(**{k: media.get(k, default="Unknown") for k in fields})
+        ret = fmt.format(
+            **{k: media.get(k, default="Unknown") for k in fields}
+        )
         return ret

     def interactive_search(  # noqa
@@ -646,7 +685,8 @@ class MusicDL(list):
         remaining_tracks = total_tracks - 50

         playlist_title_match = re.search(
-            r'<h1 class="playlisting-playlist-header-title">([^<]+)</h1>', r.text
+            r'<h1 class="playlisting-playlist-header-title">([^<]+)</h1>',
+            r.text,
         )
         if playlist_title_match is None:
             raise ParsingError("Error finding title from response")
@@ -654,7 +694,9 @@ class MusicDL(list):
         playlist_title = html.unescape(playlist_title_match.group(1))

         if remaining_tracks > 0:
-            with concurrent.futures.ThreadPoolExecutor(max_workers=15) as executor:
+            with concurrent.futures.ThreadPoolExecutor(
+                max_workers=15
+            ) as executor:
                 last_page = int(remaining_tracks // 50) + int(
                     remaining_tracks % 50 != 0
                 )
@@ -695,7 +737,9 @@ class MusicDL(list):
             ).hexdigest()

             self.config.save()
-            click.secho(f'Credentials saved to config file at "{self.config._path}"')
+            click.secho(
+                f'Credentials saved to config file at "{self.config._path}"'
+            )
         else:
             raise Exception

@@ -25,7 +25,9 @@ class MusicDB:
         """Create a database at `self.path`."""
         with sqlite3.connect(self.path) as conn:
             try:
-                conn.execute("CREATE TABLE downloads (id TEXT UNIQUE NOT NULL);")
+                conn.execute(
+                    "CREATE TABLE downloads (id TEXT UNIQUE NOT NULL);"
+                )
                 logger.debug("Download-IDs database created: %s", self.path)
             except sqlite3.OperationalError:
                 pass
@@ -130,7 +130,9 @@ class TrackMetadata:
             self.album = resp.get("title", "Unknown Album")
             self.tracktotal = resp.get("tracks_count", 1)
             self.genre = resp.get("genres_list") or resp.get("genre")
-            self.date = resp.get("release_date_original") or resp.get("release_date")
+            self.date = resp.get("release_date_original") or resp.get(
+                "release_date"
+            )
             self.copyright = resp.get("copyright")
             self.albumartist = safe_get(resp, "artist", "name")
             self.composer = safe_get(resp, "composer", "name")
@@ -139,7 +141,9 @@ class TrackMetadata:
             self.disctotal = (
                 max(
                     track.get("media_number", 1)
-                    for track in safe_get(resp, "tracks", "items", default=[{}])
+                    for track in safe_get(
+                        resp, "tracks", "items", default=[{}]
+                    )
                 )
                 or 1
             )
@@ -151,7 +155,9 @@ class TrackMetadata:
             # Non-embedded information
             self.version = resp.get("version")
             self.cover_urls = OrderedDict(resp["image"])
-            self.cover_urls["original"] = self.cover_urls["large"].replace("600", "org")
+            self.cover_urls["original"] = self.cover_urls["large"].replace(
+                "600", "org"
+            )
             self.streamable = resp.get("streamable", False)
             self.bit_depth = resp.get("maximum_bit_depth")
             self.sampling_rate = resp.get("maximum_sampling_rate")
@@ -185,14 +191,22 @@ class TrackMetadata:
             )
             self.streamable = resp.get("allowStreaming", False)

-            if q := resp.get("audioQuality"):  # for album entries in single tracks
+            if q := resp.get(
+                "audioQuality"
+            ):  # for album entries in single tracks
                 self._get_tidal_quality(q)

         elif self.__source == "deezer":
             self.album = resp.get("title", "Unknown Album")
-            self.tracktotal = resp.get("track_total", 0) or resp.get("nb_tracks", 0)
+            self.tracktotal = resp.get("track_total", 0) or resp.get(
+                "nb_tracks", 0
+            )
             self.disctotal = (
-                max(track.get("disk_number") for track in resp.get("tracks", [{}])) or 1
+                max(
+                    track.get("disk_number")
+                    for track in resp.get("tracks", [{}])
+                )
+                or 1
             )
             self.genre = safe_get(resp, "genres", "data")
             self.date = resp.get("release_date")
@@ -355,7 +369,9 @@ class TrackMetadata:

         if isinstance(self._genres, list):
             if self.__source == "qobuz":
-                genres: Iterable = re.findall(r"([^\u2192\/]+)", "/".join(self._genres))
+                genres: Iterable = re.findall(
+                    r"([^\u2192\/]+)", "/".join(self._genres)
+                )
                 genres = set(genres)
             elif self.__source == "deezer":
                 genres = ", ".join(g["name"] for g in self._genres)
@@ -387,7 +403,9 @@ class TrackMetadata:
         if hasattr(self, "_copyright"):
             if self._copyright is None:
                 return None
-            copyright: str = re.sub(r"(?i)\(P\)", PHON_COPYRIGHT, self._copyright)
+            copyright: str = re.sub(
+                r"(?i)\(P\)", PHON_COPYRIGHT, self._copyright
+            )
             copyright = re.sub(r"(?i)\(C\)", COPYRIGHT, copyright)
             return copyright

@@ -475,7 +493,12 @@ class TrackMetadata:
         for k, v in FLAC_KEY.items():
             tag = getattr(self, k)
             if tag:
-                if k in ("tracknumber", "discnumber", "tracktotal", "disctotal"):
+                if k in (
+                    "tracknumber",
+                    "discnumber",
+                    "tracktotal",
+                    "disctotal",
+                ):
                     tag = f"{int(tag):02}"

                 logger.debug("Adding tag %s: %s", v, tag)
@@ -574,7 +597,9 @@ class TrackMetadata:

         :rtype: int
         """
-        return sum(hash(v) for v in self.asdict().values() if isinstance(v, Hashable))
+        return sum(
+            hash(v) for v in self.asdict().values() if isinstance(v, Hashable)
+        )

     def __repr__(self) -> str:
         """Return the string representation of the metadata object.
@@ -2,7 +2,6 @@

 from __future__ import annotations

-import functools
 import logging
 import os
 import re
|
@ -106,7 +105,9 @@ class Album(Tracklist):
|
||||||
# Generate the folder name
|
# Generate the folder name
|
||||||
self.folder_format = kwargs.get("folder_format", FOLDER_FORMAT)
|
self.folder_format = kwargs.get("folder_format", FOLDER_FORMAT)
|
||||||
if not hasattr(self, "quality"):
|
if not hasattr(self, "quality"):
|
||||||
self.quality = min(kwargs.get("quality", 3), self.client.max_quality)
|
self.quality = min(
|
||||||
|
kwargs.get("quality", 3), self.client.max_quality
|
||||||
|
)
|
||||||
|
|
||||||
self.folder = self._get_formatted_folder(
|
self.folder = self._get_formatted_folder(
|
||||||
kwargs.get("parent_folder", "StreamripDownloads"), self.quality
|
kwargs.get("parent_folder", "StreamripDownloads"), self.quality
|
||||||
|
@@ -181,7 +182,9 @@ class Album(Tracklist):
         """
         logger.debug("Downloading track to %s", self.folder)
         if self.disctotal > 1 and isinstance(track, Track):
-            disc_folder = os.path.join(self.folder, f"Disc {track.meta.discnumber}")
+            disc_folder = os.path.join(
+                self.folder, f"Disc {track.meta.discnumber}"
+            )
             kwargs["parent_folder"] = disc_folder
         else:
             kwargs["parent_folder"] = self.folder
@@ -194,7 +197,10 @@ class Album(Tracklist):
         logger.debug("tagging tracks")
         # deezer tracks come tagged
         if kwargs.get("tag_tracks", True) and self.client.source != "deezer":
-            track.tag(cover=self.cover_obj, embed_cover=kwargs.get("embed_cover", True))
+            track.tag(
+                cover=self.cover_obj,
+                embed_cover=kwargs.get("embed_cover", True),
+            )

         return True

@@ -260,7 +266,9 @@ class Album(Tracklist):
             # lossy codecs don't have these metrics
             self.bit_depth = self.sampling_rate = None

-        formatted_folder = clean_format(self.folder_format, self._get_formatter())
+        formatted_folder = clean_format(
+            self.folder_format, self._get_formatter()
+        )

         return os.path.join(parent_folder, formatted_folder)

@@ -373,7 +381,9 @@ class Playlist(Tracklist):
         if self.client.source == "qobuz":
             self.name = self.meta["name"]
             self.image = self.meta["images"]
-            self.creator = safe_get(self.meta, "owner", "name", default="Qobuz")
+            self.creator = safe_get(
+                self.meta, "owner", "name", default="Qobuz"
+            )

             tracklist = self.meta["tracks"]["items"]

@@ -386,7 +396,9 @@ class Playlist(Tracklist):
         elif self.client.source == "tidal":
             self.name = self.meta["title"]
             self.image = tidal_cover_url(self.meta["image"], 640)
-            self.creator = safe_get(self.meta, "creator", "name", default="TIDAL")
+            self.creator = safe_get(
+                self.meta, "creator", "name", default="TIDAL"
+            )

             tracklist = self.meta["tracks"]

@@ -403,7 +415,9 @@ class Playlist(Tracklist):
         elif self.client.source == "deezer":
             self.name = self.meta["title"]
             self.image = self.meta["picture_big"]
-            self.creator = safe_get(self.meta, "creator", "name", default="Deezer")
+            self.creator = safe_get(
+                self.meta, "creator", "name", default="Deezer"
+            )

             tracklist = self.meta["tracks"]

@@ -446,7 +460,9 @@ class Playlist(Tracklist):

         logger.debug(f"Loaded {len(self)} tracks from playlist {self.name}")

-    def _prepare_download(self, parent_folder: str = "StreamripDownloads", **kwargs):
+    def _prepare_download(
+        self, parent_folder: str = "StreamripDownloads", **kwargs
+    ):
         fname = sanitize_filename(self.name)
         self.folder = os.path.join(parent_folder, fname)

@@ -603,7 +619,10 @@ class Artist(Tracklist):
             self.append(Album.from_api(album, self.client))

     def _prepare_download(
-        self, parent_folder: str = "StreamripDownloads", filters: tuple = (), **kwargs
+        self,
+        parent_folder: str = "StreamripDownloads",
+        filters: tuple = (),
+        **kwargs,
     ) -> Iterable:
         """Prepare the download.

@@ -629,7 +648,9 @@ class Artist(Tracklist):
         final = self

         if isinstance(filters, tuple) and self.client.source == "qobuz":
-            filter_funcs = (getattr(self, f"_{filter_}") for filter_ in filters)
+            filter_funcs = (
+                getattr(self, f"_{filter_}") for filter_ in filters
+            )
             for func in filter_funcs:
                 final = filter(func, final)

@@ -748,7 +769,10 @@ class Artist(Tracklist):
             best_bd = bit_depth(a["bit_depth"] for a in group)
             best_sr = sampling_rate(a["sampling_rate"] for a in group)
             for album in group:
-                if album["bit_depth"] == best_bd and album["sampling_rate"] == best_sr:
+                if (
+                    album["bit_depth"] == best_bd
+                    and album["sampling_rate"] == best_sr
+                ):
                     yield album
                     break

@@ -86,7 +86,9 @@ def get_quality(quality_id: int, source: str) -> Union[str, int]:
         raise InvalidSourceError(source)

     possible_keys = set(q_map.keys())
-    assert quality_id in possible_keys, f"{quality_id} must be in {possible_keys}"
+    assert (
+        quality_id in possible_keys
+    ), f"{quality_id} must be in {possible_keys}"
     return q_map[quality_id]


@@ -112,7 +114,9 @@ def get_quality_id(bit_depth: Optional[int], sampling_rate: Optional[int]):
         return 4


-def tqdm_download(url: str, filepath: str, params: dict = None, desc: str = None):
+def tqdm_download(
+    url: str, filepath: str, params: dict = None, desc: str = None
+):
     """Download a file with a progress bar.

     :param url: url to direct download
@@ -184,7 +188,9 @@ def tidal_cover_url(uuid, size):
     possibles = (80, 160, 320, 640, 1280)
     assert size in possibles, f"size must be in {possibles}"

-    return TIDAL_COVER_URL.format(uuid=uuid.replace("-", "/"), height=size, width=size)
+    return TIDAL_COVER_URL.format(
+        uuid=uuid.replace("-", "/"), height=size, width=size
+    )


 def init_log(path: Optional[str] = None, level: str = "DEBUG"):
@@ -273,7 +279,9 @@ def gen_threadsafe_session(
         headers = {}

     session = requests.Session()
-    adapter = requests.adapters.HTTPAdapter(pool_connections=100, pool_maxsize=100)
+    adapter = requests.adapters.HTTPAdapter(
+        pool_connections=100, pool_maxsize=100
+    )
     session.mount("https://", adapter)
     session.headers.update(headers)
     return session