Skip to content

Commit fa96e3c

Browse files
committed
tubedl: improve behavior when YouTube does not provide metadata during initial tubeadd
1 parent 1c4f22c commit fa96e3c

File tree

3 files changed

+19
-6
lines changed

3 files changed

+19
-6
lines changed

library/createdb/tube_add.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -93,8 +93,10 @@ def tube_update(args=None) -> None:
9393

9494
tube_playlists = db_playlists.get_all(
9595
args,
96-
sql_filters=["AND extractor_key NOT IN ('Local', 'reddit_praw_redditor', 'reddit_praw_subreddit')"],
96+
sql_filters=["AND coalesce(extractor_key, '') NOT IN ('Local', 'reddit_praw_redditor', 'reddit_praw_subreddit')"],
9797
)
98+
99+
print("Checking", len(tube_playlists), "playlists for updates")
98100
for d in tube_playlists:
99101
tube_backend.get_playlist_metadata(
100102
args,

library/createdb/tube_backend.py

Lines changed: 12 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -139,6 +139,7 @@ def run(self, info) -> tuple[list, dict]: # pylint: disable=arguments-renamed
139139
)
140140

141141
# TODO: webpage_url_basename seems like a weird property to check
142+
# or is this just extra guarding for YouTube behavior
142143
if webpath != playlist_path and info.get("webpage_url_basename") == "playlist":
143144
if playlist_root:
144145
if not info.get("playlist_id") or webpath == playlist_path:
@@ -187,16 +188,23 @@ def run(self, info) -> tuple[list, dict]: # pylint: disable=arguments-renamed
187188
pl = ydl.extract_info(playlist_path, download=False, process=True)
188189
log.debug("ydl.extract_info done %s", t.elapsed())
189190
except yt_dlp.DownloadError:
190-
log.error("DownloadError skipping %s", playlist_path)
191+
if args.safe:
192+
log.error("DownloadError skipping %s", playlist_path)
193+
else:
194+
log.warning("Could not scrape playlist metadata successfully (will try again [in a few days] during tubeupdate)")
195+
db_playlists.save_undownloadable(args, playlist_path)
191196
return
192197
except ExistingPlaylistVideoReached:
193198
if added_media_count > count_before_extract:
194199
sys.stderr.write("\n")
195200
db_playlists.log_problem(args, playlist_path)
196201
else:
197-
if not pl and not args.safe:
198-
log.warning("Logging undownloadable media")
199-
db_playlists.save_undownloadable(args, playlist_path)
202+
if not pl:
203+
if args.safe:
204+
log.error("DownloadError skipping %s", playlist_path)
205+
else:
206+
log.warning("Could not scrape playlist metadata successfully (will try again [in a few days] during tubeupdate)")
207+
db_playlists.save_undownloadable(args, playlist_path)
200208

201209
if args.action == consts.SC.tube_update:
202210
if added_media_count > count_before_extract:

library/mediadb/db_playlists.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -306,4 +306,7 @@ def log_problem(args, playlist_path) -> None:
306306

307307
def save_undownloadable(args, playlist_path) -> None:
308308
entry = {"path": playlist_path, "extractor_config": args.extractor_config}
309-
_add(args, objects.dict_filter_bool(entry) or {})
309+
_add(args, consolidate(args, entry))
310+
311+
if args.action == consts.SC.tube_update:
312+
update_less_frequently(args, playlist_path)

0 commit comments

Comments (0)