#!/usr/bin/env python
# yt - A fully featured command line YouTube client
#
# Copyright (C) 2024 Benedikt Peetz <benedikt.peetz@b-peetz.de>
# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Yt.
#
# You should have received a copy of the License along with this program.
# If not, see <https://www.gnu.org/licenses/gpl-3.0.txt>.
# This has been taken from the `ytcc` updater code (at `8893bc98428cb78d458a9cf3ded03f519d86a46b`).
# Source URL: https://github.com/woefe/ytcc/commit/8893bc98428cb78d458a9cf3ded03f519d86a46b

import asyncio
import itertools
import json
import logging
import sys
from dataclasses import dataclass
from functools import partial
from typing import Any, Iterable, Optional, Tuple, TypeVar

import yt_dlp
from blake3 import blake3
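
# The updater reads its configuration from positional command-line arguments,
# asks yt_dlp for the metadata of each subscribed playlist, skips entries whose
# blake3-hashed video id is already known, and prints every newly processed
# entry to stdout as a JSON object keyed by the playlist URL.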


@dataclass(frozen=True)
class Playlist:
    name: str
    url: str
    reverse: bool


@dataclass(frozen=True)
class Video:
    url: str
    title: str
    description: str
    publish_date: float
    watch_date: Optional[float]
    duration: float
    thumbnail_url: Optional[str]
    extractor_hash: str

    @property
    def watched(self) -> bool:
        return self.watch_date is not None
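

# The root logger (configured below with the numeric level from argv[3]) writes
# to stderr, while results go to stdout. yt_dlp is handed its own logger backed
# by a NullHandler, so its messages do not end up in the output stream.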
logger = logging.getLogger("yt")
logging.basicConfig(encoding="utf-8", level=int(sys.argv[3]))
_ytdl_logger = logging.getLogger("yt_dlp")
_ytdl_logger.propagate = False
_ytdl_logger.addHandler(logging.NullHandler())
YTDL_COMMON_OPTS = {"logger": _ytdl_logger}
T = TypeVar("T")


def take(amount: int, iterable: Iterable[T]) -> Iterable[T]:
    """Take the first elements of an iterable.

    If the given iterable has fewer elements than the given amount, the returned iterable
    has the same number of elements as the given iterable. Otherwise the returned iterable
    has `amount` elements.

    :param amount: The number of elements to take
    :param iterable: The iterable to take elements from
    :return: The first elements of the given iterable
    """
    for _, elem in zip(range(amount), iterable):
        yield elem
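

# Fetcher wraps the yt_dlp calls: it fetches a playlist's entry listing without
# processing the individual videos, drops entries whose hash is already known,
# and resolves the remaining entries into full metadata dictionaries.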
class Fetcher:
    def __init__(self, max_backlog):
        self.max_items = max_backlog
        self.ydl_opts = {
            **YTDL_COMMON_OPTS,
            "playliststart": 1,
            "playlistend": max_backlog,
            "noplaylist": False,
            "extractor_args": {"youtubetab": {"approximate_date": [""]}},
        }

    async def get_unprocessed_entries(
        self, url: str, hashes: Iterable[str]
    ) -> Iterable[Tuple[str, Any]]:
        result = []
        with yt_dlp.YoutubeDL(self.ydl_opts) as ydl:
            logger.info("Checking playlist '%s'...", url)
            try:
                loop = asyncio.get_event_loop()
                info = await loop.run_in_executor(
                    None,
                    partial(ydl.extract_info, url, download=False, process=False),
                )
            except yt_dlp.DownloadError as download_error:
                logger.error(
                    "Failed to get playlist '%s'. Error was: '%s'",
                    url,
                    download_error,
                )
            else:
                entries = info.get("entries", [])
                for entry in take(self.max_items, entries):
                    logger.debug(json.dumps(entry))
                    entry_id = str.encode(yt_dlp.utils.unsmuggle_url(entry["id"])[0])
                    ehash = blake3(entry_id).hexdigest()
                    if ehash not in hashes:
                        result.append((url, entry))
        return result
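
    # yt_dlp's work is blocking, so it is pushed onto the default thread-pool
    # executor; this lets asyncio process several playlists concurrently.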
    def _process_ie(self, entry):
        with yt_dlp.YoutubeDL(self.ydl_opts) as ydl:
            processed = ydl.process_ie_result(entry, False)
            # walk through the ie_result dictionary to force evaluation of lazily loaded resources
            repr(processed)
            return processed

    async def process_entry(self, url: str, entry: Any) -> Optional[Any]:
        try:
            loop = asyncio.get_event_loop()
            processed = await loop.run_in_executor(None, self._process_ie, entry)
        except yt_dlp.DownloadError as download_error:
            logger.error(
                "Failed to get a video of playlist '%s'. Error was: '%s'",
                url,
                download_error,
            )
            return None
        else:
            print(json.dumps({url: processed}))
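

# Updater fans the subscription URLs out to the Fetcher and runs all playlist
# updates concurrently on a single asyncio event loop.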
class Updater:
    def __init__(self, max_backlog=20):
        self.max_items = max_backlog
        self.fetcher = Fetcher(max_backlog)
        self.hashes = None

    async def update_url(self, url: str):
        logger.info("Updating %s...", url)
        new_entries = await self.fetcher.get_unprocessed_entries(url, self.hashes)
        await asyncio.gather(
            *itertools.starmap(self.fetcher.process_entry, new_entries)
        )

    async def do_update(self, urls: Iterable[str]):
        await asyncio.gather(*map(self.update_url, urls))

    def update(self, urls: Iterable[str], hashes: Iterable[str]):
        self.hashes = hashes
        asyncio.run(self.do_update(urls))
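

# Entry point. The expected argv layout, as parsed below:
#   argv[1]        maximum number of entries to fetch per playlist (max backlog)
#   argv[2]        number of subscription URLs that follow
#   argv[3]        numeric log level for the root logger
#   argv[4:4+n]    the subscription URLs
#   argv[4+n:]     blake3 hashes of already-known video ids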
def update():
    max_backlog = int(sys.argv[1])
    subscriptions_number = int(sys.argv[2])
    u = Updater(max_backlog=max_backlog)
    u.update(
        sys.argv[4 : (4 + subscriptions_number)],
        sys.argv[(4 + subscriptions_number) :],
    )
    logger.debug(sys.argv)


update()