Skip to content

Commit 28d1bf3

Browse files
committed
3.0.160
1 parent 0c142a7 commit 28d1bf3

File tree

5 files changed

+98
-113
lines changed

.github/README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -99,7 +99,7 @@ To stop playing press Ctrl+C in either the terminal or mpv
9999
<details><summary>List all subcommands</summary>
100100

101101
$ library
102-
library (v3.0.159; 104 subcommands)
102+
library (v3.0.160; 104 subcommands)
103103

104104
Create database subcommands:
105105
╭─────────────────┬──────────────────────────────────────────╮

library/__main__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@
55
from library.utils import argparse_utils, iterables
66
from library.utils.log_utils import log
77

8-
__version__ = "3.0.159"
8+
__version__ = "3.0.160"
99

1010
progs = {
1111
"Create database subcommands": {

library/createdb/gallery_backend.py

Lines changed: 40 additions & 56 deletions
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,9 @@
1-
import itertools, os
1+
import os
22
from pathlib import Path
33
from types import ModuleType
44

55
import gallery_dl
6-
from gallery_dl.exception import StopExtraction
7-
from gallery_dl.extractor.message import Message
86
from gallery_dl.job import Job
9-
from gallery_dl.util import build_duration_func
107

118
from library.mediadb import db_media, db_playlists
129
from library.utils import consts, printing, strings
@@ -141,70 +138,57 @@ def download(args, m):
141138
)
142139

143140

144-
class GeneratorJob(Job):
145-
def __init__(self, *args, **kwargs):
146-
super().__init__(*args, **kwargs)
147-
if hasattr(super(), "_init"):
148-
super()._init()
149-
self.dispatched = False
141+
class UrlJob(Job):
142+
resolve = 1 # depth of queue resolution
143+
144+
def __init__(self, url, parent=None, resolve=None):
145+
super().__init__(url, parent)
146+
self.results = []
150147
self.visited = set()
151-
self.status = 0
152-
153-
def message_generator(self):
154-
extractor = self.extractor
155-
sleep = build_duration_func(extractor.config("sleep-extractor"))
156-
if sleep:
157-
extractor.sleep(sleep(), "extractor")
158-
159-
try:
160-
for msg in extractor:
161-
self.dispatch(msg)
162-
if self.dispatched:
163-
yield msg
164-
self.dispatched = False
165-
except StopExtraction:
166-
pass
167-
168-
def run(self):
169-
for msg in self.message_generator():
170-
ident, url, kwdict = msg
171-
if ident == Message.Url:
172-
yield (msg[1], msg[2])
173-
174-
elif ident == Message.Queue:
175-
if url in self.visited:
176-
continue
177-
self.visited.add(url)
178-
179-
cls = kwdict.get("_extractor")
180-
if cls:
181-
extr = cls.from_url(url)
182-
else:
183-
extr = self.extractor.find(url)
184-
185-
if extr:
186-
job = self.__class__(extr, self)
187-
yield from job.run()
188-
else:
189-
raise TypeError
148+
if resolve is not None:
149+
self.resolve = resolve
150+
151+
if self.resolve > 0:
152+
self.handle_queue = self.handle_queue_resolve
190153

191154
def handle_url(self, url, kwdict):
192-
self.dispatched = True
155+
self.results.append((url, kwdict))
193156

194157
def handle_queue(self, url, kwdict):
195-
self.dispatched = True
158+
# unresolved queue entry
159+
self.results.append((url, kwdict))
160+
161+
def handle_queue_resolve(self, url, kwdict):
162+
if url in self.visited:
163+
return
164+
self.visited.add(url)
165+
166+
cls = kwdict.get("_extractor")
167+
if cls:
168+
extr = cls.from_url(url)
169+
else:
170+
extr = self.extractor.find(url)
171+
172+
if not extr:
173+
self.results.append((url, kwdict))
174+
return
175+
176+
job = self.__class__(extr, self, self.resolve - 1)
177+
job.results = self.results # shared accumulator
178+
job.visited = self.visited # shared visited set
179+
job.run()
196180

197181

198182
def get_playlist_metadata(args, playlist_path):
199183
gallery_dl = load_module_level_gallery_dl(args)
200184

201185
added_media_count = 0
202-
job = GeneratorJob(playlist_path)
203-
gen = job.run()
186+
job = UrlJob(playlist_path)
187+
job.run()
188+
189+
is_playlist = len(job.results) > 1
204190

205-
first_two = list(itertools.islice(gen, 2))
206-
is_playlist = len(first_two) > 1
207-
for webpath, info in itertools.chain(first_two, gen):
191+
for webpath, info in job.results:
208192
errors = parse_gdl_job_status(job.status, playlist_path)
209193
extractor_key = "gdl_" + job.extractor.category
210194

0 commit comments

Comments (0)