Mirror of https://github.com/nathom/streamrip.git (synced 2025-06-05 17:24:25 -04:00)
Switch to json for storing search results

commit 71f71d554c
parent 725553e501

3 changed files with 43 additions and 11 deletions
@@ -46,7 +46,7 @@ class ArtistSummary(Summary):
 
     @classmethod
     def from_item(cls, item: dict):
-        id = item["id"]
+        id = str(item["id"])
         name = (
             item.get("name")
             or item.get("performer", {}).get("name")
@@ -80,7 +80,7 @@ class TrackSummary(Summary):
 
     @classmethod
     def from_item(cls, item: dict):
-        id = item["id"]
+        id = str(item["id"])
         name = item.get("title") or item.get("name") or "Unknown"
         artist = (
             item.get("performer", {}).get("name")
@@ -126,7 +126,7 @@ class AlbumSummary(Summary):
 
     @classmethod
     def from_item(cls, item: dict):
-        id = item["id"]
+        id = str(item["id"])
         name = item.get("title") or "Unknown Title"
         artist = (
             item.get("performer", {}).get("name")
@@ -174,7 +174,7 @@ class LabelSummary(Summary):
 
     @classmethod
     def from_item(cls, item: dict):
-        id = item["id"]
+        id = str(item["id"])
         name = item["name"]
         return cls(id, name)
 
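
Note: the four hunks above make the same change in each from_item classmethod: the raw id is coerced to str. A minimal sketch of why, assuming (as some source APIs do) that ids can arrive as integers; the payload below is made up:

import json

# Hypothetical item payload; some sources return numeric ids, others strings.
item = {"id": 64868125, "name": "Some Artist"}

summary_id = str(item["id"])  # normalized to "64868125"

# With every summary storing a string id, the dicts built later by
# SearchResults.as_list stay homogeneous (list[dict[str, str]]) and
# serialize predictably:
print(json.dumps({"id": summary_id}))  # {"id": "64868125"}
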
@@ -273,3 +273,14 @@ class SearchResults:
         assert ind is not None
         i = int(ind.group(0))
         return self.results[i - 1].preview()
+
+    def as_list(self, source: str) -> list[dict[str, str]]:
+        return [
+            {
+                "source": source,
+                "media_type": i.media_type(),
+                "id": i.id,
+                "desc": i.summarize(),
+            }
+            for i in self.results
+        ]
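
Note: as_list flattens each result summary into a plain dict so the whole result set can be serialized. A sketch of the shape it yields, with invented values (the exact desc text depends on each summarize() implementation, which this diff does not show):

# Hypothetical return value of SearchResults.as_list("qobuz").
results_as_list = [
    {
        "source": "qobuz",
        "media_type": "album",
        "id": "19512572",
        "desc": "Album Title by Some Artist",
    },
    {
        "source": "qobuz",
        "media_type": "track",
        "id": "52954203",
        "desc": "Track Title by Some Artist",
    },
]
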
@@ -1,4 +1,5 @@
 import asyncio
+import json
 import logging
 import os
 import shutil
@@ -147,7 +148,11 @@ async def url(ctx, urls):
 
 
 @rip.command()
-@click.argument("path", required=True)
+@click.argument(
+    "path",
+    required=True,
+    type=click.Path(exists=True, readable=True, file_okay=True, dir_okay=False),
+)
 @click.pass_context
 @coro
 async def file(ctx, path):
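
Note: the stricter click.Path type means click validates the argument before the command body runs, so a missing path or a directory is rejected with a usage error rather than surfacing as an exception from aiofiles.open. A standalone sketch of that behaviour (a hypothetical demo command, not streamrip's actual one; only the argument declaration mirrors the hunk above):

import click
from click.testing import CliRunner

@click.command()
@click.argument(
    "path",
    required=True,
    type=click.Path(exists=True, readable=True, file_okay=True, dir_okay=False),
)
def demo(path):
    click.echo(f"would read {path}")

result = CliRunner().invoke(demo, ["does_not_exist.txt"])
print(result.exit_code)  # 2: click reports that the path does not exist
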
@@ -159,8 +164,26 @@ async def file(ctx, path):
     """
     with ctx.obj["config"] as cfg:
         async with Main(cfg) as main:
-            async with aiofiles.open(path) as f:
-                await main.add_all([line async for line in f])
+            async with aiofiles.open(path, "r") as f:
+                try:
+                    items = json.loads(await f.read())
+                    loaded = True
+                except json.JSONDecodeError:
+                    items = [line async for line in f]
+                    loaded = False
+
+            if loaded:
+                console.print(
+                    f"Detected json file. Loading [yellow]{len(items)}[/yellow] items"
+                )
+                await main.add_all_by_id(
+                    [(i["source"], i["media_type"], i["id"]) for i in items]
+                )
+            else:
+                console.print(
+                    f"Detected list of urls. Loading [yellow]{len(items)}[/yellow] items"
+                )
+                await main.add_all(items)
 
             await main.resolve()
             await main.rip()
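
Note: with this change the file command accepts two input formats: the JSON export written by a search (parsed with json.loads and handed to add_all_by_id as (source, media_type, id) tuples) and the older newline-separated URL list, which raises JSONDecodeError and falls through to add_all. A self-contained sketch of that dispatch, using made-up file contents:

import json

# 1) A search-results export: a JSON array of id records.
search_export = """[
    {
        "source": "qobuz",
        "media_type": "track",
        "id": "52954203",
        "desc": "Track Title by Some Artist"
    }
]"""

# 2) The older style: one URL per line (the URL is illustrative).
url_list = "https://open.qobuz.com/album/0060254735180\n"

for text in (search_export, url_list):
    try:
        items = json.loads(text)
        print("json export:", [(i["source"], i["media_type"], i["id"]) for i in items])
    except json.JSONDecodeError:
        print("url list:", text.splitlines())
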
@@ -1,4 +1,5 @@
 import asyncio
+import json
 import logging
 import platform
 
@@ -208,10 +209,7 @@ class Main:
             return
 
         search_results = SearchResults.from_pages(source, media_type, pages)
-        file_contents = "\n".join(
-            f"{self.dummy_url(source, item.media_type(), item.id)} [{item.summarize()}]"
-            for item in search_results.results
-        )
+        file_contents = json.dumps(search_results.as_list(source), indent=4)
         async with aiofiles.open(filepath, "w") as f:
             await f.write(file_contents)
 
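
Note: together with the CLI change above, this sets up a simple round trip: a search writes json.dumps(search_results.as_list(source), indent=4) to a file, and rip file reads it back into (source, media_type, id) tuples. A minimal check of that round trip with an invented entry:

import json

# Invented entry standing in for SearchResults.as_list(source) output.
written = json.dumps(
    [
        {
            "source": "qobuz",
            "media_type": "album",
            "id": "19512572",
            "desc": "Album Title by Some Artist",
        }
    ],
    indent=4,
)

items = json.loads(written)
requests = [(i["source"], i["media_type"], i["id"]) for i in items]
assert requests == [("qobuz", "album", "19512572")]  # what add_all_by_id receives
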