Fix logger

nathom 2021-05-12 15:19:51 -07:00
parent b97d9f4e34
commit 71ac34db7e
10 changed files with 70 additions and 31 deletions

View file

@@ -41,7 +41,7 @@ from .utils import (
     tqdm_download,
 )
 
-logger = logging.getLogger(__name__)
+logger = logging.getLogger("streamrip")
 
 TYPE_REGEXES = {
     "remaster": re.compile(r"(?i)(re)?master(ed)?"),

View file

@@ -14,7 +14,8 @@ from .constants import CACHE_DIR, CONFIG_DIR, CONFIG_PATH, QOBUZ_FEATURED_KEYS
 from .core import MusicDL
 from .utils import init_log
 
-logger = logging.getLogger(__name__)
+logging.basicConfig(level="INFO")
+logger = logging.getLogger("streamrip")
 
 if not os.path.isdir(CONFIG_DIR):
     os.makedirs(CONFIG_DIR, exist_ok=True)
@@ -54,7 +55,8 @@ def cli(ctx, **kwargs):
     global core
 
     if kwargs["debug"]:
-        init_log()
+        logger.setLevel("DEBUG")
+        logger.debug("Starting debug log")
 
     config = Config()
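
Since every module now requests the same "streamrip" logger instead of a per-module __name__ logger, the level set once in cli.py applies across the whole package. A minimal standalone sketch of that behaviour, using only the stdlib (not the project's actual modules):

import logging

# cli.py-style setup: configure the root handler, then grab the shared logger.
logging.basicConfig(level="INFO")
logger = logging.getLogger("streamrip")

# Any other module asking for "streamrip" receives the very same object.
assert logging.getLogger("streamrip") is logger

# The --debug path: raising the level once enables debug output everywhere.
logger.setLevel("DEBUG")
logger.debug("Starting debug log")  # now emitted to the console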

View file

@@ -36,7 +36,7 @@ from .exceptions import (
 from .spoofbuz import Spoofer
 from .utils import gen_threadsafe_session, get_quality
 
-logger = logging.getLogger(__name__)
+logger = logging.getLogger("streamrip")
 
 
 class Client(ABC):
@@ -450,7 +450,6 @@ class DeezerClient(Client):
             f"{DEEZER_BASE}/search/{media_type}", params={"q": query}
         )
         response.raise_for_status()
-
         return response.json()
 
     def login(self, **kwargs):

View file

@@ -1,10 +1,10 @@
 """A config class that manages arguments between the config file and CLI."""
 import copy
-from collections import OrderedDict
 import logging
 import os
 import re
+from collections import OrderedDict
 from pprint import pformat
 from typing import Any, Dict, List
@@ -22,7 +22,7 @@ from .exceptions import InvalidSourceError
 yaml = YAML()
 
-logger = logging.getLogger(__name__)
+logger = logging.getLogger("streamrip")
 
 
 class Config:

View file

@@ -9,7 +9,7 @@ from typing import Optional
 from .exceptions import ConversionError
 
-logger = logging.getLogger(__name__)
+logger = logging.getLogger("streamrip")
 
 SAMPLING_RATES = (44100, 48000, 88200, 96000, 176400, 192000)

View file

@@ -6,8 +6,10 @@ import logging
 import os
 import re
 import sys
+import time
 from getpass import getpass
 from hashlib import md5
+from pprint import pprint
 from string import Formatter
 from typing import Dict, Generator, List, Optional, Tuple, Type, Union
@@ -16,7 +18,13 @@ import requests
 from tqdm import tqdm
 
 from .bases import Track, Video, YoutubeVideo
-from .clients import DeezerClient, QobuzClient, SoundCloudClient, TidalClient, Client
+from .clients import (
+    Client,
+    DeezerClient,
+    QobuzClient,
+    SoundCloudClient,
+    TidalClient,
+)
 from .config import Config
 from .constants import (
     CONFIG_PATH,
@@ -38,7 +46,7 @@ from .exceptions import (
 from .tracklists import Album, Artist, Label, Playlist, Tracklist
 from .utils import extract_interpreter_url
 
-logger = logging.getLogger(__name__)
+logger = logging.getLogger("streamrip")
 
 
 Media = Union[
@@ -344,6 +352,15 @@ class MusicDL(list):
         :param urls:
         """
+        # Available keys: ['artist', 'title']
+        QUERY_FORMAT: Dict[str, str] = {
+            "tidal": "{title}",
+            "qobuz": "{title} {artist}",
+            "deezer": "{title} {artist}",
+            "soundcloud": "{title} {artist}",
+        }
+
         # For testing:
         # https://www.last.fm/user/nathan3895/playlists/12058911
         user_regex = re.compile(r"https://www\.last\.fm/user/([^/]+)/playlists/\d+")
@@ -361,6 +378,7 @@ class MusicDL(list):
             """
             try:
                 track = next(self.search(lastfm_source, query, media_type="track"))
+
                 if self.config.session["metadata"]["set_playlist_to_album"]:
                     # so that the playlist name (actually the album) isn't
                     # amended to include version and work tags from individual tracks
@@ -383,7 +401,11 @@ class MusicDL(list):
         tracks_not_found: int = 0
         with concurrent.futures.ThreadPoolExecutor(max_workers=15) as executor:
             futures = [
-                executor.submit(search_query, f"{title} {artist}", pl)
+                executor.submit(
+                    search_query,
+                    QUERY_FORMAT[lastfm_source].format(title=title, artist=artist),
+                    pl,
+                )
                 for title, artist in queries
             ]
             # only for the progress bar
@@ -396,6 +418,7 @@ class MusicDL(list):
                     tracks_not_found += 1
 
         pl.loaded = True
+
         click.secho(f"{tracks_not_found} tracks not found.", fg="yellow")
         self.append(pl)
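
The new QUERY_FORMAT table means the last.fm search string is built per source with plain str.format, so a template that omits a field (Tidal's) simply drops it. A small standalone sketch of that lookup; build_query is a hypothetical helper, not a function from the diff:

from typing import Dict

# Same templates as the diff; available keys are 'artist' and 'title'.
QUERY_FORMAT: Dict[str, str] = {
    "tidal": "{title}",
    "qobuz": "{title} {artist}",
    "deezer": "{title} {artist}",
    "soundcloud": "{title} {artist}",
}

def build_query(source: str, title: str, artist: str) -> str:
    # str.format ignores keyword arguments the template never references,
    # so the Tidal template quietly drops the artist.
    return QUERY_FORMAT[source].format(title=title, artist=artist)

print(build_query("qobuz", "Kind of Blue", "Miles Davis"))  # Kind of Blue Miles Davis
print(build_query("tidal", "Kind of Blue", "Miles Davis"))  # Kind of Blue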
@@ -422,14 +445,16 @@ class MusicDL(list):
         :type query: str
         :param media_type:
         :type media_type: str
-        :param limit:
+        :param limit: Not Implemented
         :type limit: int
         :rtype: Generator
         """
+        print(logger)
+        logger.debug("searching for %s", query)
         client = self.get_client(source)
         results = client.search(query, media_type)
 
-        i = 0
         if isinstance(results, Generator):  # QobuzClient
             for page in results:
                 tracklist = (
@@ -437,11 +462,10 @@ class MusicDL(list):
                     if media_type != "featured"
                     else page["albums"]["items"]
                 )
-                for item in tracklist:
+                for i, item in enumerate(tracklist):
                     yield MEDIA_CLASS[  # type: ignore
                         media_type if media_type != "featured" else "album"
                     ].from_api(item, client)
-                    i += 1
                     if i > limit:
                         return
         else:
@@ -451,9 +475,9 @@ class MusicDL(list):
             if items is None:
                 raise NoResultsFound(query)
 
-            for item in items:
+            for i, item in enumerate(items):
+                logger.debug(item["title"])
                 yield MEDIA_CLASS[media_type].from_api(item, client)  # type: ignore
-                i += 1
                 if i > limit:
                     return
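
Both search branches now get their counter from enumerate instead of a hand-maintained `i += 1`, while keeping the same `i > limit` early return. A minimal standalone sketch of the pattern, with a plain sequence standing in for the client results:

from typing import Generator, Iterable

def limited(items: Iterable[str], limit: int) -> Generator[str, None, None]:
    # enumerate provides the index the old code tracked manually with `i += 1`.
    for i, item in enumerate(items):
        yield item
        if i > limit:  # same check as the diff: stop once the index passes limit
            return

# Indices 0..4 are yielded; i == 4 > 3 triggers the return after the fifth item.
print(list(limited("abcdefg", limit=3)))  # ['a', 'b', 'c', 'd', 'e']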
@@ -589,6 +613,9 @@ class MusicDL(list):
         :type url: str
         :rtype: Tuple[str, list]
         """
+        logger.debug("Fetching lastfm playlist")
+
         info = []
         words = re.compile(r"[\w\s]+")
         title_tags = re.compile('title="([^"]+)"')
@@ -610,7 +637,7 @@ class MusicDL(list):
         if remaining_tracks_match is not None:
             remaining_tracks = int(remaining_tracks_match.group(1)) - 50
         else:
-            raise Exception("Error parsing lastfm page")
+            raise ParsingError("Error parsing lastfm page")
 
         playlist_title_match = re.search(
             r'<h1 class="playlisting-playlist-header-title">([^<]+)</h1>', r.text
@@ -618,14 +645,25 @@ class MusicDL(list):
         if playlist_title_match is not None:
             playlist_title = html.unescape(playlist_title_match.group(1))
         else:
-            raise Exception("Error finding title from response")
+            raise ParsingError("Error finding title from response")
 
-        page = 1
-        while remaining_tracks > 0:
-            page += 1
-            r = requests.get(f"{url}?page={page}")
-            get_titles(r.text)
-            remaining_tracks -= 50
+        if remaining_tracks > 0:
+            with concurrent.futures.ThreadPoolExecutor(max_workers=15) as executor:
+                last_page = int(remaining_tracks // 50) + int(
+                    remaining_tracks % 50 != 0
+                )
+
+                futures = [
+                    executor.submit(requests.get, f"{url}?page={page}")
+                    for page in range(1, last_page + 1)
+                ]
+                for future in tqdm(
+                    concurrent.futures.as_completed(futures),
+                    total=len(futures),
+                    desc="Scraping playlist",
+                ):
+                    get_titles(future.result().text)
 
         return playlist_title, info
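
The sequential page loop becomes a thread pool: the number of extra pages is a ceiling division of the remaining tracks by last.fm's 50-track page size, and the pages are fetched concurrently, arriving in completion order rather than page order. A rough standalone sketch of the same arithmetic and submission pattern; page_count and fetch_pages are hypothetical helpers and the URL handling is illustrative only:

import concurrent.futures

import requests

PAGE_SIZE = 50  # tracks per last.fm playlist page

def page_count(remaining_tracks: int) -> int:
    # Same arithmetic as the diff: floor division plus one if there is a
    # remainder, i.e. a ceiling division without importing math.
    return int(remaining_tracks // PAGE_SIZE) + int(remaining_tracks % PAGE_SIZE != 0)

def fetch_pages(url: str, remaining_tracks: int) -> list:
    pages = []
    if remaining_tracks <= 0:
        return pages
    with concurrent.futures.ThreadPoolExecutor(max_workers=15) as executor:
        futures = [
            executor.submit(requests.get, f"{url}?page={page}")
            for page in range(1, page_count(remaining_tracks) + 1)
        ]
        # as_completed yields each future as soon as its request finishes,
        # so page order is not preserved.
        for future in concurrent.futures.as_completed(futures):
            pages.append(future.result().text)
    return pages

# page_count(120) == 3, page_count(100) == 2, page_count(0) == 0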

View file

@@ -5,7 +5,7 @@ import os
 import sqlite3
 from typing import Union
 
-logger = logging.getLogger(__name__)
+logger = logging.getLogger("streamrip")
 
 
 class MusicDB:
class MusicDB: class MusicDB:

View file

@@ -20,7 +20,7 @@ from .constants import (
 from .exceptions import InvalidContainerError, InvalidSourceError
 from .utils import get_quality_id, safe_get, tidal_cover_url
 
-logger = logging.getLogger(__name__)
+logger = logging.getLogger("streamrip")
 
 
 class TrackMetadata:

View file

@@ -26,7 +26,7 @@ from .utils import (
     tqdm_download,
 )
 
-logger = logging.getLogger(__name__)
+logger = logging.getLogger("streamrip")
 
 
 class Album(Tracklist):

View file

@@ -22,7 +22,7 @@ from .constants import AGENT, LOG_DIR, TIDAL_COVER_URL
 from .exceptions import InvalidSourceError, NonStreamable
 
 urllib3.disable_warnings()
-logger = logging.getLogger(__name__)
+logger = logging.getLogger("streamrip")
 
 
 def safe_get(d: dict, *keys: Hashable, default=None):
@@ -197,9 +197,9 @@ def init_log(path: Optional[str] = None, level: str = "DEBUG"):
     :param rotate:
     :type rotate: str
     """
-    path = os.path.join(LOG_DIR, "streamrip.log")
+    # path = os.path.join(LOG_DIR, "streamrip.log")
     level = logging.getLevelName(level)
-    logging.basicConfig(filename=path, filemode="a", level=level)
+    logging.basicConfig(level=level)
 
 
 def decrypt_mqa_file(in_path, out_path, encryption_key):
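
With the filename and filemode arguments dropped, init_log now just configures console logging at the requested level; logging.getLevelName maps a level name string such as "DEBUG" to its numeric value. A minimal sketch of the simplified behaviour, with the signature trimmed for illustration (the real function keeps its path and level parameters):

import logging

def init_log(level: str = "DEBUG") -> None:
    # getLevelName maps "DEBUG" -> 10 (and numeric levels back to names).
    numeric_level = logging.getLevelName(level)
    # With no filename, basicConfig attaches a StreamHandler writing to stderr.
    logging.basicConfig(level=numeric_level)

init_log("INFO")
logging.getLogger("streamrip").info("now logging to the console")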