Switch to json for storing search results

Nathan Thomas 2023-12-23 11:09:31 -08:00
parent 725553e501
commit 71f71d554c
3 changed files with 43 additions and 11 deletions

View File

@@ -46,7 +46,7 @@ class ArtistSummary(Summary):
     @classmethod
     def from_item(cls, item: dict):
-        id = item["id"]
+        id = str(item["id"])
         name = (
             item.get("name")
             or item.get("performer", {}).get("name")
@@ -80,7 +80,7 @@ class TrackSummary(Summary):
     @classmethod
     def from_item(cls, item: dict):
-        id = item["id"]
+        id = str(item["id"])
         name = item.get("title") or item.get("name") or "Unknown"
         artist = (
             item.get("performer", {}).get("name")
@@ -126,7 +126,7 @@ class AlbumSummary(Summary):
     @classmethod
     def from_item(cls, item: dict):
-        id = item["id"]
+        id = str(item["id"])
         name = item.get("title") or "Unknown Title"
         artist = (
             item.get("performer", {}).get("name")
@@ -174,7 +174,7 @@ class LabelSummary(Summary):
     @classmethod
     def from_item(cls, item: dict):
-        id = item["id"]
+        id = str(item["id"])
         name = item["name"]
         return cls(id, name)
@@ -273,3 +273,14 @@ class SearchResults:
         assert ind is not None
         i = int(ind.group(0))
         return self.results[i - 1].preview()
+
+    def as_list(self, source: str) -> list[dict[str, str]]:
+        return [
+            {
+                "source": source,
+                "media_type": i.media_type(),
+                "id": i.id,
+                "desc": i.summarize(),
+            }
+            for i in self.results
+        ]
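
For orientation, here is a small sketch (not part of the commit) of the list shape that the new as_list method produces; later in the commit it is serialized with json.dumps(..., indent=4). The source, id, and desc values below are made up.

import json

# Illustrative only: the shape SearchResults.as_list() returns. Values are
# invented; the keys match the dict built in the new method above.
example_results = [
    {
        "source": "qobuz",  # hypothetical source name
        "media_type": "album",
        "id": "123456",
        "desc": "Some Artist - Some Album",
    },
]

print(json.dumps(example_results, indent=4))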

View File

@@ -1,4 +1,5 @@
 import asyncio
+import json
 import logging
 import os
 import shutil
@@ -147,7 +148,11 @@ async def url(ctx, urls):
 @rip.command()
-@click.argument("path", required=True)
+@click.argument(
+    "path",
+    required=True,
+    type=click.Path(exists=True, readable=True, file_okay=True, dir_okay=False),
+)
 @click.pass_context
 @coro
 async def file(ctx, path):
@@ -159,8 +164,26 @@ async def file(ctx, path):
     """
     with ctx.obj["config"] as cfg:
         async with Main(cfg) as main:
-            async with aiofiles.open(path) as f:
-                await main.add_all([line async for line in f])
+            async with aiofiles.open(path, "r") as f:
+                try:
+                    items = json.loads(await f.read())
+                    loaded = True
+                except json.JSONDecodeError:
+                    items = [line async for line in f]
+                    loaded = False
+
+            if loaded:
+                console.print(
+                    f"Detected json file. Loading [yellow]{len(items)}[/yellow] items"
+                )
+                await main.add_all_by_id(
+                    [(i["source"], i["media_type"], i["id"]) for i in items]
+                )
+            else:
+                console.print(
+                    f"Detected list of urls. Loading [yellow]{len(items)}[/yellow] items"
+                )
+                await main.add_all(items)
             await main.resolve()
             await main.rip()
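
A standalone sketch (not from the commit; plain strings stand in for file contents) of the detection rule the reworked file command relies on: JSON search-result files parse with json.loads, while a plain newline-separated URL list raises JSONDecodeError and falls back to the old line-by-line path.

import json

# Minimal sketch of the json-vs-url-list detection used above, with inline
# strings standing in for the file contents.
json_contents = '[{"source": "qobuz", "media_type": "track", "id": "1", "desc": "x"}]'
url_contents = "https://example.com/album/1\nhttps://example.com/album/2"

for contents in (json_contents, url_contents):
    try:
        items = json.loads(contents)
        print("detected json file:", len(items), "items")
    except json.JSONDecodeError:
        items = contents.splitlines()
        print("detected list of urls:", len(items), "items")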

View File

@@ -1,4 +1,5 @@
 import asyncio
+import json
 import logging
 import platform
@@ -208,10 +209,7 @@ class Main:
             return
         search_results = SearchResults.from_pages(source, media_type, pages)
-        file_contents = "\n".join(
-            f"{self.dummy_url(source, item.media_type(), item.id)} [{item.summarize()}]"
-            for item in search_results.results
-        )
+        file_contents = json.dumps(search_results.as_list(source), indent=4)
         async with aiofiles.open(filepath, "w") as f:
             await f.write(file_contents)
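
Putting the two halves together, a synchronous sketch (made-up data, a temporary file instead of the real search-results path) of the round trip this commit sets up: the search code writes as_list() output as JSON, and the file command reads it back into (source, media_type, id) tuples for add_all_by_id.

import json
import tempfile

# Round-trip sketch with invented data: write search results the way the
# search code now does, then rebuild the (source, media_type, id) tuples
# the way the file command now does.
results = [
    {"source": "qobuz", "media_type": "track", "id": "42", "desc": "Song - Artist"},
]

with tempfile.NamedTemporaryFile("w+", suffix=".json") as f:
    f.write(json.dumps(results, indent=4))
    f.seek(0)
    items = json.loads(f.read())

requests = [(i["source"], i["media_type"], i["id"]) for i in items]
print(requests)  # [('qobuz', 'track', '42')]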