Mirror of https://github.com/nathom/streamrip.git (synced 2025-05-09 14:11:55 -04:00)
Fix downloads for large soundcloud files
commit ad71293f0a (parent 615a36257b)
2 changed files with 62 additions and 16 deletions
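
Why this fixes it: SoundCloud serves long tracks as an HLS playlist of many small mp3 segments, and the old code joined every downloaded segment in a single "ffmpeg -i concat:..." invocation, which has to open all of the segments at once — presumably the failure mode for large files, given the new MAX_FILES_OPEN constant. The new concat_audio_files helper instead joins at most MAX_FILES_OPEN segments per ffmpeg pass and recurses on the batch outputs. A minimal sketch of the batch arithmetic (the real constant lives in streamrip's .constants module; the value below is an assumed stand-in):

    import itertools

    MAX_FILES_OPEN = 128  # assumed stand-in; the real value comes from .constants

    def batch_sizes(paths):
        # the same ceil-division batching the new concat_audio_files performs
        it = iter(paths)
        num_batches = len(paths) // MAX_FILES_OPEN + (
            1 if len(paths) % MAX_FILES_OPEN != 0 else 0
        )
        return [len(list(itertools.islice(it, MAX_FILES_OPEN))) for _ in range(num_batches)]

    print(batch_sizes([f"seg{i}.mp3" for i in range(300)]))  # [128, 128, 44]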
streamrip/media.py

@@ -43,6 +43,7 @@ from .metadata import TrackMetadata
 from .utils import (
     clean_filename,
     clean_format,
+    concat_audio_files,
     decrypt_mqa_file,
     downsize_image,
     ext,

@@ -429,7 +430,7 @@ class Track(Media):
         :type dl_info: dict
         :rtype: str
         """
-        logger.debug("dl_info: %s", dl_info)
+        # logger.debug("dl_info: %s", dl_info)
         if dl_info["type"] == "mp3":
             import m3u8
             import requests

@@ -447,18 +448,8 @@
                 bar.update(1)

             pool.download(callback=update_tqdm_bar)
-            subprocess.call(
-                (
-                    "ffmpeg",
-                    "-i",
-                    f"concat:{'|'.join(pool.files)}",
-                    "-acodec",
-                    "copy",
-                    "-loglevel",
-                    "panic",
-                    self.path,
-                )
-            )
+
+            concat_audio_files(pool.files, self.path, "mp3")

         elif dl_info["type"] == "original":
             _quick_download(dl_info["url"], self.path, desc=self._progress_desc)
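Call-site note: subprocess.call ignored ffmpeg's exit status, whereas the new helper raises FfmpegError on a nonzero return code. A hedged usage sketch of the helper from outside the class (placeholder paths and hypothetical file names; requires ffmpeg on PATH):

    from streamrip.utils import concat_audio_files

    # segment files produced by the HLS download pool, in playlist order
    segments = ["/tmp/seg0.mp3", "/tmp/seg1.mp3", "/tmp/seg2.mp3"]
    concat_audio_files(segments, "/tmp/track.mp3", "mp3")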
streamrip/utils.py

@@ -3,9 +3,14 @@
 from __future__ import annotations

 import base64
+import itertools
 import logging
 import os
+import shutil
+import subprocess
+import tempfile
 from string import Formatter
-from typing import Dict, Hashable, Iterator, Optional, Tuple, Union
+from typing import Dict, Hashable, Iterator, List, Optional, Tuple, Union

 import requests
 from click import secho, style

@@ -13,13 +18,63 @@ from pathvalidate import sanitize_filename
 from requests.packages import urllib3
 from tqdm import tqdm

-from .constants import COVER_SIZES, TIDAL_COVER_URL
-from .exceptions import InvalidQuality, InvalidSourceError
+from .constants import COVER_SIZES, MAX_FILES_OPEN, TIDAL_COVER_URL
+from .exceptions import FfmpegError, InvalidQuality, InvalidSourceError

 urllib3.disable_warnings()
 logger = logging.getLogger("streamrip")


+def concat_audio_files(paths: List[str], out: str, ext: str):
+    logger.debug("Concatenating %d files", len(paths))
+    if len(paths) == 1:
+        shutil.move(paths[0], out)
+        return
+
+    it = iter(paths)
+    num_batches = len(paths) // MAX_FILES_OPEN + (
+        1 if len(paths) % MAX_FILES_OPEN != 0 else 0
+    )
+    logger.debug(
+        "Using %d batches with max file limit of %d", num_batches, MAX_FILES_OPEN
+    )
+    tempdir = tempfile.gettempdir()
+    outpaths = [
+        os.path.join(
+            tempdir, f"__streamrip_ffmpeg_{hash(paths[i*MAX_FILES_OPEN])}.{ext}"
+        )
+        for i in range(num_batches)
+    ]
+
+    for p in outpaths:
+        try:
+            os.remove(p)  # in case of failure
+        except FileNotFoundError:
+            pass
+
+    logger.debug("Batch outfiles: %s", outpaths)
+
+    for i in range(num_batches):
+        logger.debug("Batch %d", i)
+        proc = subprocess.run(
+            (
+                "ffmpeg",
+                "-i",
+                f"concat:{'|'.join(itertools.islice(it, MAX_FILES_OPEN))}",
+                "-acodec",
+                "copy",
+                "-loglevel",
+                "panic",
+                outpaths[i],
+            ),
+            # capture_output=True,
+        )
+        if proc.returncode != 0:
+            raise FfmpegError(proc.stderr)
+
+    concat_audio_files(outpaths, out, ext)
+
+
 def safe_get(d: dict, *keys: Hashable, default=None):
     """Traverse dict layers safely.
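
To see that the batching recursion terminates and preserves segment order, here is a self-contained toy model of the same control flow, with plain byte concatenation standing in for ffmpeg; everything below is illustrative, not code from the repo:

    import itertools
    import os
    import shutil
    import tempfile

    MAX_FILES_OPEN = 4  # tiny limit so the batching is visible

    def concat_files(paths, out):
        # mirrors concat_audio_files: one output per MAX_FILES_OPEN inputs,
        # then recurse on the batch outputs until a single file remains
        if len(paths) == 1:
            shutil.move(paths[0], out)
            return
        it = iter(paths)
        num_batches = len(paths) // MAX_FILES_OPEN + (
            1 if len(paths) % MAX_FILES_OPEN != 0 else 0
        )
        outpaths = []
        for _ in range(num_batches):
            fd, op = tempfile.mkstemp(suffix=".bin")
            with os.fdopen(fd, "wb") as dst:
                for p in itertools.islice(it, MAX_FILES_OPEN):
                    with open(p, "rb") as src:
                        dst.write(src.read())
            outpaths.append(op)
        concat_files(outpaths, out)

    tmpdir = tempfile.mkdtemp()
    parts = []
    for i in range(10):
        path = os.path.join(tmpdir, f"seg{i}")
        with open(path, "w") as f:
            f.write(str(i))
        parts.append(path)

    dest = os.path.join(tmpdir, "joined")
    concat_files(parts, dest)
    with open(dest) as f:
        print(f.read())  # 0123456789: ten files -> three batches -> one file

Each pass divides the file count by MAX_FILES_OPEN, so the recursion reaches the single-file base case after a logarithmic number of passes in the real helper.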