Skip to content

Commit 4049789

Browse files
committed
2.5.014
1 parent f490efa commit 4049789

File tree

6 files changed

+19
-8
lines changed

6 files changed

+19
-8
lines changed

.github/README.md

+5-1
Original file line numberDiff line numberDiff line change
@@ -95,7 +95,7 @@ To stop playing press Ctrl+C in either the terminal or mpv
9595
<details><summary>List all subcommands</summary>
9696

9797
$ library
98-
xk media library subcommands (v2.5.013)
98+
xk media library subcommands (v2.5.014)
9999

100100
Create database subcommands:
101101
╭───────────────┬────────────────────────────────────────────────────╮
@@ -1650,6 +1650,10 @@ BTW, for some cols like time_deleted you'll need to specify a where clause so th
16501650

16511651
library download dl.db --prefix ~/output/path/root/
16521652

1653+
But you can sort; e.g. oldest first
1654+
1655+
library download dl.db -u m.time_modified,m.time_created
1656+
16531657
Limit downloads to specified playlist URLs or a substring (TODO: https://github.com/chapmanjacobd/library/issues/31)
16541658

16551659
library download dl.db https://www.youtube.com/c/BlenderFoundation/videos

xklb/__init__.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
__version__ = "2.5.013"
1+
__version__ = "2.5.014"

xklb/db_media.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -86,7 +86,7 @@ def consolidate(v: dict) -> Optional[dict]:
8686
duration = v.pop("duration", None)
8787
cv["duration"] = 0 if not duration else int(duration)
8888
cv["time_uploaded"] = upload_date
89-
cv["time_created"] = consts.now()
89+
cv["time_created"] = iterables.safe_unpack(v.pop("time_created", None), consts.now())
9090
cv["time_modified"] = 0 # this should be 0 if the file has never been downloaded
9191
cv["time_deleted"] = 0
9292
cv["time_downloaded"] = 0
@@ -113,7 +113,7 @@ def consolidate(v: dict) -> Optional[dict]:
113113
)
114114

115115
# extractor_key should only be in playlist table
116-
cv["extractor_id"] = v.pop("id", None)
116+
cv["extractor_id"] = iterables.safe_unpack(v.pop("extractor_id", None), v.pop("id", None))
117117
cv["title"] = iterables.safe_unpack(v.pop("title", None), v.get("playlist_title"))
118118
cv["width"] = v.pop("width", None)
119119
cv["height"] = v.pop("height", None)

xklb/fs_extract.py

+3
Original file line numberDiff line numberDiff line change
@@ -170,6 +170,9 @@ def extract_metadata(mp_args, path) -> Optional[Dict[str, int]]:
170170
ext = path.rsplit(".", 1)[-1].lower()
171171
is_scan_all_files = getattr(mp_args, "scan_all_files", False)
172172

173+
if media["type"] == "directory":
174+
return None
175+
173176
if not Path(path).exists():
174177
return media
175178

xklb/media/dedupe.py

+4-4
Original file line numberDiff line numberDiff line change
@@ -387,11 +387,11 @@ def get_fs_duplicates(args) -> List[dict]:
387387
else:
388388
path_media_map[path]["hash"] = hash
389389
args.db["media"].upsert(path_media_map[path], pk=["path"], alter=True) # save sample-hash back to db
390-
media = [path_media_map[d['path']] for d in media if d['path'] in path_media_map]
390+
media = [path_media_map[d["path"]] for d in media if d["path"] in path_media_map]
391391

392392
sample_hash_groups = defaultdict(set)
393393
for m in media:
394-
sample_hash_groups[m["hash"]].add(m['path'])
394+
sample_hash_groups[m["hash"]].add(m["path"])
395395
sample_hash_groups = [l for l in sample_hash_groups.values() if len(l) > 1]
396396

397397
sample_hash_paths = set().union(*sample_hash_groups)
@@ -414,10 +414,10 @@ def get_fs_duplicates(args) -> List[dict]:
414414

415415
dup_media = []
416416
for hash_group_paths in full_hash_groups:
417-
paths = [d['path'] for d in media if d['path'] in hash_group_paths] # get the correct order from media
417+
paths = [d["path"] for d in media if d["path"] in hash_group_paths] # get the correct order from media
418418
keep_path = paths[0]
419419
dup_media.extend(
420-
{"keep_path": keep_path, "duplicate_path": p, "duplicate_size": path_media_map[keep_path]['size']}
420+
{"keep_path": keep_path, "duplicate_path": p, "duplicate_size": path_media_map[keep_path]["size"]}
421421
for p in paths[1:]
422422
)
423423

xklb/usage.py

+4
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,10 @@
66
77
library download dl.db --prefix ~/output/path/root/
88
9+
But you can sort; e.g. oldest first
10+
11+
library download dl.db -u m.time_modified,m.time_created
12+
913
Limit downloads to specified playlist URLs or a substring (TODO: https://github.com/chapmanjacobd/library/issues/31)
1014
1115
library download dl.db https://www.youtube.com/c/BlenderFoundation/videos

0 commit comments

Comments
 (0)