diff options
Diffstat (limited to 'python_update')
-rwxr-xr-x | python_update/raw_update.py | 32 |
1 file changed, 21 insertions, 11 deletions
diff --git a/python_update/raw_update.py b/python_update/raw_update.py
index 82be0a1..6f5b78d 100755
--- a/python_update/raw_update.py
+++ b/python_update/raw_update.py
@@ -13,14 +13,15 @@
 # This has been take from the `ytcc` updater code (at `8893bc98428cb78d458a9cf3ded03f519d86a46b`).
 # Source URL: https://github.com/woefe/ytcc/commit/8893bc98428cb78d458a9cf3ded03f519d86a46b
 
+from blake3 import blake3
+from dataclasses import dataclass
+from functools import partial
+from typing import Any, Iterable, Optional, Tuple, TypeVar
 import asyncio
 import itertools
 import json
 import logging
 import sys
-from dataclasses import dataclass
-from functools import partial
-from typing import Any, Iterable, Optional, Tuple, TypeVar
 
 import yt_dlp
 
@@ -85,7 +86,9 @@ class Fetcher:
             "extractor_args": {"youtubetab": {"approximate_date": [""]}},
         }
 
-    async def get_unprocessed_entries(self, url: str) -> Iterable[Tuple[str, Any]]:
+    async def get_unprocessed_entries(
+        self, url: str, hashes: Iterable[str]
+    ) -> Iterable[Tuple[str, str, Any]]:
         result = []
         with yt_dlp.YoutubeDL(self.ydl_opts) as ydl:
             logger.info("Checking playlist '%s'...", url)
@@ -104,7 +107,10 @@ class Fetcher:
             else:
                 entries = info.get("entries", [])
             for entry in take(self.max_items, entries):
-                result.append((url, entry))
+                id = str.encode(yt_dlp.utils.unsmuggle_url(entry["id"])[0])
+                ehash = blake3(id).hexdigest()
+                if ehash not in hashes:
+                    result.append((url, entry))
         return result
 
     def _process_ie(self, entry):
@@ -135,10 +141,11 @@ class Updater:
     def __init__(self, max_backlog=20):
         self.max_items = max_backlog
         self.fetcher = Fetcher(max_backlog)
+        self.hashes = None
 
     async def update_url(self, url: str):
         print(f"Updating {url}...", file=sys.stderr)
-        new_entries = await self.fetcher.get_unprocessed_entries(url)
+        new_entries = await self.fetcher.get_unprocessed_entries(url, self.hashes)
 
         await asyncio.gather(
             *itertools.starmap(self.fetcher.process_entry, new_entries)
@@ -147,14 +154,17 @@ class Updater:
     async def do_update(self, urls: Iterable[str]):
         await asyncio.gather(*map(self.update_url, urls))
 
-    def update(self, urls: Iterable[str]):
+    def update(self, urls: Iterable[str], hashes: Iterable[str]):
+        self.hashes = hashes
         asyncio.run(self.do_update(urls))
 
-def update(max_backlog: int):
+def update():
+    max_backlog = int(sys.argv[1])
+    subscriptions_number = int(sys.argv[2])
     u = Updater(max_backlog=max_backlog)
-    u.update(sys.argv[2:])
+    u.update(sys.argv[3:(3 + subscriptions_number)], sys.argv[(3 + subscriptions_number):])
 
-max_backlog = int(sys.argv[1])
-update(max_backlog)
+print(sys.argv, file=sys.stderr)
+update()