@@ -139,6 +139,7 @@ def run(self, info) -> tuple[list, dict]: # pylint: disable=arguments-renamed
139139 )
140140
141141 # TODO: webpage_url_basename seems like a weird property to check
142+ # or is this just extra guarding for YouTube behavior
142143 if webpath != playlist_path and info.get("webpage_url_basename") == "playlist":
143144 if playlist_root:
144145 if not info.get("playlist_id") or webpath == playlist_path:
@@ -187,16 +188,23 @@ def run(self, info) -> tuple[list, dict]: # pylint: disable=arguments-renamed
187188 pl = ydl.extract_info(playlist_path, download=False, process=True)
188189 log.debug("ydl.extract_info done %s", t.elapsed())
189190 except yt_dlp.DownloadError:
190- log.error("DownloadError skipping %s", playlist_path)
191+ if args.safe:
192+ log.error("DownloadError skipping %s", playlist_path)
193+ else:
194+ log.warning("Could not scrape playlist metadata successfully (will try again [in a few days] during tubeupdate)")
195+ db_playlists.save_undownloadable(args, playlist_path)
191196 return
192197 except ExistingPlaylistVideoReached:
193198 if added_media_count > count_before_extract:
194199 sys.stderr.write("\n")
195200 db_playlists.log_problem(args, playlist_path)
196201 else:
197- if not pl and not args.safe:
198- log.warning("Logging undownloadable media")
199- db_playlists.save_undownloadable(args, playlist_path)
202+ if not pl:
203+ if args.safe:
204+ log.error("DownloadError skipping %s", playlist_path)
205+ else:
206+ log.warning("Could not scrape playlist metadata successfully (will try again [in a few days] during tubeupdate)")
207+ db_playlists.save_undownloadable(args, playlist_path)
200208
201209 if args.action == consts.SC.tube_update:
202210 if added_media_count > count_before_extract:
0 commit comments