
Commit 63c2964

misc
1 parent 0fe5eac commit 63c2964

22 files changed: +443 −204 lines

library/createdb/computers_add.py

Lines changed: 4 additions & 1 deletion
@@ -2,7 +2,7 @@
 from pathlib import Path

 from library import usage
-from library.mediadb import db_playlists
+from library.mediadb import db_media, db_playlists
 from library.utils import arggroups, argparse_utils, consts, db_utils, objects, remote_processes
 from library.utils.log_utils import log

@@ -102,4 +102,7 @@ def computer_add(args, hostnames):
 def computers_add():
     args = parse_args()

+    db_playlists.create(args)
+    db_media.create(args)
+
     computer_add(args, args.hostnames)
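Several subcommands in this commit (computers_add, fs_add, torrents_add, tube_add, web_add) now call db_playlists.create(args) and db_media.create(args) immediately after argument parsing, so the tables exist even when no rows end up being added. The real column definitions live in library/mediadb; the sketch below only illustrates the idempotent-create pattern with sqlite-utils, and its column names are placeholders, not the actual schema.

# Sketch of the idempotent create() pattern; column names are placeholders,
# not the real library/mediadb schema.
import sqlite_utils

def create_tables(db: sqlite_utils.Database) -> None:
    # if_not_exists=True makes these calls safe to repeat on every run,
    # which is why each subcommand can call create() unconditionally.
    db.create_table(
        "playlists",
        {"id": int, "path": str, "extractor_key": str, "time_created": int},
        pk="id",
        if_not_exists=True,
    )
    db.create_table(
        "media",
        {"id": int, "path": str, "title": str, "time_created": int},
        pk="id",
        if_not_exists=True,
    )

create_tables(sqlite_utils.Database("library.db"))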

library/createdb/fs_add.py

Lines changed: 2 additions & 0 deletions
@@ -296,6 +296,8 @@ def fs_add(args=None) -> None:
         sys.argv = ["lb", *args]

     args = parse_args(SC.fs_add, usage.fs_add)
+    db_playlists.create(args)
+    db_media.create(args)
     extractor(args, args.paths)



library/createdb/links_add.py

Lines changed: 2 additions & 2 deletions
@@ -370,9 +370,9 @@ def links_update() -> None:
         new_media = extractor(args_env, playlist["path"])

         if new_media > 0:
-            db_playlists.decrease_update_delay(args, playlist["path"])
+            db_playlists.update_more_frequently(args, playlist["path"])
         else:
-            db_playlists.increase_update_delay(args, playlist["path"])
+            db_playlists.update_less_frequently(args, playlist["path"])

         if playlist_count > 3:
             time.sleep(random.uniform(0.05, 2))
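The decrease_update_delay / increase_update_delay calls are renamed to update_more_frequently / update_less_frequently; behaviour in this hunk is unchanged, only the name now describes the polling cadence rather than the delay value. The real helpers live in library/mediadb/db_playlists.py; the sketch below shows one way such an adaptive interval could work, with a hypothetical update_delay column.

# Hypothetical sketch of an adaptive polling interval; the real
# db_playlists helpers and schema may differ.
import sqlite3

MIN_DELAY = 60 * 60            # poll at most hourly
MAX_DELAY = 90 * 24 * 60 * 60  # back off to at most every 90 days

def update_more_frequently(conn: sqlite3.Connection, playlist_path: str) -> None:
    with conn:  # new media appeared: halve the delay so we re-check sooner
        conn.execute(
            "UPDATE playlists SET update_delay = max(?, update_delay / 2) WHERE path = ?",
            (MIN_DELAY, playlist_path),
        )

def update_less_frequently(conn: sqlite3.Connection, playlist_path: str) -> None:
    with conn:  # nothing new: double the delay so quiet playlists back off
        conn.execute(
            "UPDATE playlists SET update_delay = min(?, update_delay * 2) WHERE path = ?",
            (MAX_DELAY, playlist_path),
        )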

library/createdb/reddit_add.py

Lines changed: 2 additions & 2 deletions
@@ -364,8 +364,8 @@ def reddit_update(args=None) -> None:
             elif extractor_key == "reddit_praw_redditor":
                 redditor_new(args, {"path": path, "name": name})

-            db_playlists.decrease_update_delay(args, playlist["path"])
+            db_playlists.update_more_frequently(args, playlist["path"])
         except skip_errors as e:
-            db_playlists.increase_update_delay(args, playlist["path"])
+            db_playlists.update_less_frequently(args, playlist["path"])
             log.error("[%s] skipping: %s", name, e)
             continue

library/createdb/tabs_add.py

Lines changed: 2 additions & 0 deletions
@@ -84,6 +84,8 @@ def tabs_add(args=None) -> None:
     args = parse_args()
     paths = list(gen_paths(args))

+    db_media.create(args)
+
     tabs = iterables.list_dict_filter_bool([consolidate_url(args, path) for path in get_new_paths(args, paths)])
     for tab in tabs:
         db_media.add(args, tab)
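tabs_add only needs the media table, so only db_media.create(args) is added here. The iterables.list_dict_filter_bool call that follows presumably strips falsy values and empty records before anything reaches db_media.add; a hypothetical equivalent, for illustration only:

# Hypothetical stand-in for library.utils.iterables.list_dict_filter_bool;
# the real helper may behave differently.
def list_dict_filter_bool(dicts):
    cleaned = ({k: v for k, v in d.items() if v} for d in dicts if d)
    return [d for d in cleaned if d]

# list_dict_filter_bool([{"path": "https://example.com", "title": ""}, {}, None])
# -> [{"path": "https://example.com"}]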

library/createdb/torrents_add.py

Lines changed: 4 additions & 1 deletion
@@ -2,7 +2,7 @@
 from pathlib import Path

 from library import usage
-from library.mediadb import db_playlists
+from library.mediadb import db_media, db_playlists
 from library.utils import arg_utils, arggroups, argparse_utils, consts, db_utils, iterables, nums, objects, printing
 from library.utils.file_utils import trash
 from library.utils.log_utils import log
@@ -85,6 +85,9 @@ def extract_metadata(path):
 def torrents_add():
     args = parse_args()

+    db_playlists.create(args)
+    db_media.create(args)
+
     scanned_set = set(arg_utils.gen_paths(args, default_exts=(".torrent",)))

     known_hashes = set()
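torrents_add gains the same pair of create() calls before it scans for .torrent files. The scanned_set / known_hashes pair suggests set-based dedup against already-ingested torrents; the sketch below shows only that general shape, hashing raw file bytes as a stand-in for however the command actually derives known_hashes.

# Sketch of set-based dedup; hashing the raw .torrent bytes is a placeholder
# for however torrents_add actually identifies known torrents.
import hashlib
from pathlib import Path

def new_torrents(scanned_paths, known_hashes: set):
    for path in scanned_paths:
        digest = hashlib.sha1(Path(path).read_bytes()).hexdigest()
        if digest in known_hashes:
            continue  # already in the database; skip re-adding it
        known_hashes.add(digest)
        yield path, digest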

library/createdb/tube_add.py

Lines changed: 4 additions & 0 deletions
@@ -37,6 +37,9 @@ def tube_add(args=None) -> None:
     args = parse_args(SC.tube_add, usage=usage.tube_add)
     paths = arg_utils.gen_paths(args)

+    db_playlists.create(args)
+    db_media.create(args)
+
     if args.insert_only:
         args.db["media"].insert_all(
             [
@@ -89,6 +92,7 @@ def tube_update(args=None) -> None:
         sys.argv = ["tubeupdate", *args]

     args = parse_args(SC.tube_update, usage=usage.tube_update)
+
     tube_playlists = db_playlists.get_all(
         args,
         sql_filters=["AND extractor_key NOT IN ('Local', 'reddit_praw_redditor', 'reddit_praw_subreddit')"],
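tube_add creates both tables before the insert_only branch, which bulk-loads bare records through args.db["media"].insert_all(...). The record shape is not visible in this hunk; the sketch below only shows the sqlite-utils bulk-insert pattern with assumed fields.

# Sketch of the insert_only idea with sqlite-utils; the record fields here
# are assumptions, not the columns tube_add actually writes.
import time
import sqlite_utils

db = sqlite_utils.Database("library.db")
paths = ["https://youtu.be/aaaa", "https://youtu.be/bbbb"]
db["media"].insert_all(
    ({"path": p, "time_created": int(time.time())} for p in paths),
    pk="path",
    alter=True,   # add any missing columns rather than failing
    ignore=True,  # skip paths that are already present
)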

library/createdb/tube_backend.py

Lines changed: 3 additions & 3 deletions
@@ -130,7 +130,7 @@ def run(self, info) -> tuple[list, dict]:  # pylint: disable=arguments-renamed

             entry = objects.dumbcopy(info)
             if entry:
-                if db_playlists.media_exists(args, playlist_path, webpath) and not args.ignore_errors:
+                if db_playlists.media_exists(args, webpath, playlist_path) and not args.ignore_errors:
                     raise ExistingPlaylistVideoReached

                 if not info.get("playlist_id") or webpath == playlist_path:
@@ -172,9 +172,9 @@ def run(self, info) -> tuple[list, dict]:  # pylint: disable=arguments-renamed

     if args.action == consts.SC.tube_update:
         if added_media_count > count_before_extract:
-            db_playlists.decrease_update_delay(args, playlist_path)
+            db_playlists.update_more_frequently(args, playlist_path)
         else:
-            db_playlists.increase_update_delay(args, playlist_path)
+            db_playlists.update_less_frequently(args, playlist_path)


 def yt_subs_config(args):
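The substantive fix here is the argument order: db_playlists.media_exists is now called with webpath before playlist_path, which presumably matches the function's (args, path, playlist_path) parameter order; the old call had the two paths swapped. Since both arguments are plain strings, nothing catches a swap at the call site. One defensive option (a sketch, not necessarily what the library does) is keyword-only parameters:

# Sketch of a keyword-only signature that makes swapped path arguments
# impossible; the real db_playlists.media_exists and its schema may differ.
def media_exists(args, *, path: str, playlist_path: str) -> bool:
    row = args.db.execute(
        "SELECT 1 FROM media WHERE path = ? AND playlists_id = "
        "(SELECT id FROM playlists WHERE path = ?) LIMIT 1",
        [path, playlist_path],
    ).fetchone()
    return row is not None

# Call sites then read unambiguously:
#   media_exists(args, path=webpath, playlist_path=playlist_path)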

library/createdb/web_add.py

Lines changed: 10 additions & 11 deletions
@@ -183,15 +183,11 @@ def spider(args, paths: list):
             link = web.remove_apache_sorting_params(link_dict.pop("link"))

             if link in traversed_paths or link in paths:
-                continue
-            if web.is_index(link):
-                if web.is_subpath(path, link):
-                    paths.append(link)
-                continue
-
-            if db_media.exists(args, link):
+                pass
+            elif web.is_subpath(path, link):
+                paths.append(link)
+            elif db_media.exists(args, link):
                 known_paths.add(link)
-                continue
             else:
                 new_paths[link] = objects.merge_dict_values_str(new_paths.get(link) or {}, link_dict)
         else:  # not HTML page
@@ -263,6 +259,9 @@ def web_add(args=None) -> None:

     args = parse_args(consts.SC.web_add, usage=usage.web_add)

+    db_playlists.create(args)
+    db_media.create(args)
+
     if args.insert_only:
         media_new = set()
         media_known = set()
@@ -300,7 +299,7 @@ def web_update(args=None) -> None:

     web_playlists = db_playlists.get_all(
         args,
-        sql_filters="extractor_key = 'WebFolder'",
+        sql_filters=["AND extractor_key = 'WebFolder'"],
         order_by="""length(path)-length(REPLACE(path, '/', '')) desc
         , random()
         """,
@@ -320,9 +319,9 @@ def web_update(args=None) -> None:
         new_media = spider(args_env, [playlist["path"]])

         if new_media > 0:
-            db_playlists.decrease_update_delay(args, playlist["path"])
+            db_playlists.update_more_frequently(args, playlist["path"])
         else:
-            db_playlists.increase_update_delay(args, playlist["path"])
+            db_playlists.update_less_frequently(args, playlist["path"])

         if playlist_count > 3:
             time.sleep(random.uniform(0.05, 2))
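Besides flattening the spider's continue-based checks into one if/elif chain and adding the create() calls, web_update now passes sql_filters as a list of "AND ..." fragments instead of a bare string, matching the tube_update call above. A sketch of how a list of fragments is typically spliced into the query (the real db_playlists.get_all may build it differently, and time_deleted is an assumed column):

# Hypothetical sketch of composing sql_filters fragments; the real
# db_playlists.get_all query and columns may differ.
import sqlite_utils

def get_all(db: sqlite_utils.Database, sql_filters=None, order_by="random()"):
    sql = "SELECT * FROM playlists WHERE COALESCE(time_deleted, 0) = 0"
    for fragment in sql_filters or []:
        sql += " " + fragment  # each fragment already starts with "AND ..."
    sql += " ORDER BY " + order_by
    return list(db.query(sql))

# get_all(db, sql_filters=["AND extractor_key = 'WebFolder'"])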

library/mediadb/db_history.py

Lines changed: 7 additions & 1 deletion
@@ -21,9 +21,15 @@ def exists(args, media_id) -> bool:
 def create(args):
     args.db.create_table(
         "history",
-        {"media_id": int, "time_played": int, "playhead": int, "done": int},
+        {
+            "media_id": int,
+            "time_played": int,
+            "playhead": int,
+            "done": bool,
+        },
         pk="id",
         if_not_exists=True,
+        strict=True,
     )


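The history table now declares done as bool and is created with strict=True. With sqlite-utils, bool maps to an INTEGER column, and strict=True produces a SQLite STRICT table (SQLite 3.37+), so values that cannot be coerced to the declared column types are rejected at insert time instead of being stored as-is. A self-contained sketch of the same call:

# Mirrors the new db_history.create() schema; requires SQLite 3.37+ for STRICT.
import sqlite_utils

db = sqlite_utils.Database(memory=True)
db.create_table(
    "history",
    {"media_id": int, "time_played": int, "playhead": int, "done": bool},
    pk="id",
    if_not_exists=True,
    strict=True,
)
db["history"].insert({"media_id": 1, "time_played": 0, "playhead": 42, "done": True})
# With STRICT, a row like {"media_id": "not a number", ...} now fails with a
# datatype constraint error instead of being silently stored as text.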
0 commit comments
