-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathcheck-lifecycle.py
More file actions
executable file
·1070 lines (891 loc) · 40.6 KB
/
check-lifecycle.py
File metadata and controls
executable file
·1070 lines (891 loc) · 40.6 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
#!/usr/bin/env python3
"""check-lifecycle.py — Tier 2 lifecycle heuristics for GNOME extensions.
Usage: check-lifecycle.py EXTENSION_DIR
Checks:
- R-LIFE-01: Signal connection/disconnection balance
- R-LIFE-02: Untracked timeout sources
- R-LIFE-03: Missing enable/disable methods
- R-LIFE-04: connectObject migration advisory
- R-LIFE-05: Async/await without _destroyed guard
- R-LIFE-06: timeout_add/idle_add without SOURCE_REMOVE/SOURCE_CONTINUE
- R-LIFE-07: DBus proxy creation without disconnect
- R-LIFE-08: File monitor without cancel
- R-LIFE-09: Keybinding add without remove
- R-LIFE-10: InjectionManager without clear() + prototype override detection
- R-LIFE-11: Lock screen signal safety
- R-LIFE-12: Stored timeout/idle ID without Source.remove() in disable()
- R-LIFE-13: Selective disable() detection (conditional return skips cleanup)
- R-LIFE-14: unlock-dialog comment requirement
- R-LIFE-15: Soup.Session without abort() in disable/destroy
- R-LIFE-16: DBus export without unexport in disable/destroy
- R-LIFE-17: Timeout ID reassignment without prior Source.remove()
- R-LIFE-18: Subprocess without cancellation in disable/destroy
- R-LIFE-20: Bus name ownership without release
- R-SEC-16: Clipboard + keybinding cross-reference
- R-FILE-07: Missing export default class
Output: PIPE-delimited lines: STATUS|check-name|detail
"""
import json
import os
import re
import sys
def result(status, check, detail):
    """Emit one PIPE-delimited report line: STATUS|check-name|detail."""
    print(status, check, detail, sep="|")
def find_js_files(ext_dir, exclude_prefs=False):
    """Recursively collect .js files under ext_dir.

    Directories node_modules/.git/__pycache__ are pruned from the walk;
    when exclude_prefs is True, any file named prefs.js is omitted.
    """
    ignored = {'node_modules', '.git', '__pycache__'}
    matches = []
    for root, subdirs, names in os.walk(ext_dir):
        # Prune ignored directories in place so os.walk never descends.
        subdirs[:] = [d for d in subdirs if d not in ignored]
        matches.extend(
            os.path.join(root, n)
            for n in names
            if n.endswith('.js') and not (exclude_prefs and n == 'prefs.js')
        )
    return matches
def read_file(path):
    """Return the file's text as UTF-8, replacing undecodable bytes instead of raising."""
    with open(path, 'r', encoding='utf-8', errors='replace') as handle:
        return handle.read()
def strip_comments(content):
    """Remove JS block and line comments from content.

    This is a heuristic regex pass, not a tokenizer: block comments
    /* ... */ are dropped first (non-greedy, across lines), then line
    comments.  A '//' immediately preceded by ':' is kept, so protocol
    URLs inside string literals (http://, https://, file://) survive
    intact; other '//' occurrences inside strings may still be stripped,
    which is acceptable for the fuzzy pattern checks in this script.
    """
    # Block comments (non-greedy so adjacent comments don't merge).
    content = re.sub(r'/\*.*?\*/', '', content, flags=re.DOTALL)
    # Line comments; the (?<!:) lookbehind keeps '://' URL strings intact.
    content = re.sub(r'(?<!:)//.*$', '', content, flags=re.MULTILINE)
    return content
def check_enable_disable(ext_dir):
    """R-LIFE-03: extension.js must define enable() and disable()."""
    path = os.path.join(ext_dir, 'extension.js')
    if not os.path.isfile(path):
        # A missing extension.js is reported by the file-structure check.
        return
    body = strip_comments(read_file(path))
    found = {name: bool(re.search(rf'\b{name}\s*\(', body))
             for name in ('enable', 'disable')}
    if not found['enable']:
        result("FAIL", "lifecycle/enable-method", "extension.js missing enable() method")
    if not found['disable']:
        result("FAIL", "lifecycle/disable-method", "extension.js missing disable() method")
    if all(found.values()):
        result("PASS", "lifecycle/enable-disable", "enable() and disable() both defined")
def check_default_export(ext_dir):
    """R-FILE-07: extension.js should have an 'export default class'."""
    path = os.path.join(ext_dir, 'extension.js')
    if not os.path.isfile(path):
        return
    code = strip_comments(read_file(path))
    if re.search(r'\bexport\s+default\s+class\b', code):
        result("PASS", "lifecycle/default-export", "extension.js has default export class")
    else:
        result("WARN", "lifecycle/default-export",
               "extension.js missing 'export default class' — required for GNOME 45+")
def check_signal_balance(ext_dir):
    """R-LIFE-01: Signal connection/disconnection balance.

    Counts manual .connect()/.disconnect() calls per line, treating
    connectObject/connectSmart as auto-managed, 'destroy'-signal connects
    as self-cleaning, and non-`this` connects as locally scoped.
    """
    files = find_js_files(ext_dir, exclude_prefs=True)
    if not files:
        return
    manual_connects = 0
    manual_disconnects = 0
    object_connects = 0   # .connectObject() — auto-managed
    smart_connects = 0    # .connectSmart() — auto-managed
    tracker_found = False  # SignalTracker/SignalManager implies auto-cleanup
    for path in files:
        text = read_file(path)
        if re.search(r'\b(SignalTracker|SignalManager)\b', text):
            tracker_found = True
        for row in text.splitlines():
            # --- classify connect-side calls ---
            if re.search(r'\.connectObject\s*\(', row):
                object_connects += 1
            elif re.search(r'\.connectSmart\s*\(', row):
                smart_connects += 1
            elif re.search(r"\.connect\s*\(\s*['\"]", row) and not re.search(r'\.disconnect', row):
                is_destroy = bool(re.search(r"\.connect\s*\(\s*['\"]destroy['\"]", row))
                is_local = not re.search(r'this[._]', row)
                # 'destroy' signals self-clean; local variables are scoped.
                if not is_destroy and not is_local:
                    manual_connects += 1
            # --- classify disconnect-side calls (independent of the above) ---
            if re.search(r'\.disconnectObject\s*\(', row) or re.search(r'\.disconnectSmart\s*\(', row):
                continue  # auto-managed cleanup calls, not counted
            if re.search(r'\.disconnect\s*\(', row) and not re.search(r'\.connect\s*\(', row):
                manual_disconnects += 1
    auto_total = object_connects + smart_connects
    # Allow an imbalance of one before warning (heuristic slack).
    if manual_connects - manual_disconnects > 1 and not tracker_found:
        result("WARN", "lifecycle/signal-balance",
               f"{manual_connects} manual .connect() calls but only {manual_disconnects} "
               f".disconnect() calls — verify all signals are disconnected in disable()")
    else:
        result("PASS", "lifecycle/signal-balance",
               f"Signal balance OK ({manual_connects} connects, {manual_disconnects} disconnects, "
               f"{auto_total} auto-managed)")
def check_untracked_timeouts(ext_dir):
    """R-LIFE-02: timeout_add/idle_add without stored return value."""
    files = find_js_files(ext_dir, exclude_prefs=True)
    if not files:
        return
    timer_call = re.compile(r'(timeout_add|idle_add|setTimeout|setInterval)\s*\(')
    captured = re.compile(r'(=|return)\s*.*(timeout_add|idle_add|setTimeout|setInterval)')
    offenders = []
    for path in files:
        rel = os.path.relpath(path, ext_dir)
        for lineno, raw in enumerate(read_file(path).splitlines(), 1):
            text = raw.strip()
            if text.startswith(('//', '*')):
                continue  # comment line
            # A timer call whose ID is neither assigned nor returned.
            if timer_call.search(text) and not captured.search(text):
                offenders.append(f"{rel}:{lineno}")
    if offenders:
        for loc in offenders:
            result("WARN", "lifecycle/untracked-timeout",
                   f"{loc}: timer/idle return value not stored — "
                   f"cannot be removed in disable()")
    else:
        result("PASS", "lifecycle/untracked-timeout",
               "All timeout/idle sources have stored IDs")
def check_connect_object_migration(ext_dir):
    """R-LIFE-04: Suggest connectObject when 3+ manual connect IDs are stored."""
    files = find_js_files(ext_dir, exclude_prefs=True)
    if not files:
        return
    stored_connects = 0
    uses_auto_api = False
    for path in files:
        text = read_file(path)
        uses_auto_api = uses_auto_api or bool(
            re.search(r'\.(connectObject|connectSmart)\s*\(', text))
        # Count assignments that store a connection ID.
        stored_connects += len(re.findall(r'=\s*\w+\.connect\s*\(', text))
    if stored_connects >= 3 and not uses_auto_api:
        result("WARN", "lifecycle/connectObject-migration",
               f"{stored_connects} manual signal connections found — "
               f"consider using connectObject() for automatic cleanup")
    else:
        result("PASS", "lifecycle/connectObject-migration",
               "Signal connection pattern OK")
def check_async_destroyed_guard(ext_dir):
    """R-LIFE-05: async/await code should carry a _destroyed/_isDestroyed guard flag."""
    files = find_js_files(ext_dir, exclude_prefs=True)
    if not files:
        return
    uses_await = False
    has_guard = False
    for path in files:
        code = strip_comments(read_file(path))
        if re.search(r'\basync\b', code) and re.search(r'\bawait\b', code):
            uses_await = True
        if re.search(r'\b(_destroyed|_isDestroyed)\b', code):
            has_guard = True
    if not uses_await:
        return  # no async code — skip silently
    if has_guard:
        result("PASS", "lifecycle/async-destroyed-guard",
               "async/await with _destroyed guard detected")
    else:
        result("WARN", "lifecycle/async-destroyed-guard",
               "async/await used without _destroyed or _isDestroyed guard — "
               "extension may act on stale state after disable()")
def check_timeout_return_value(ext_dir):
    """R-LIFE-06: timeout_add/idle_add callbacks should return SOURCE_REMOVE or SOURCE_CONTINUE."""
    files = find_js_files(ext_dir, exclude_prefs=True)
    if not files:
        return
    missing = []
    for path in files:
        rel = os.path.relpath(path, ext_dir)
        lines = read_file(path).splitlines()
        for idx, raw in enumerate(lines):
            text = raw.strip()
            if text.startswith(('//', '*')):
                continue  # comment line
            if not re.search(r'(timeout_add|idle_add)\s*\(', text):
                continue
            # The return constant may appear anywhere within the next 20 lines.
            window = '\n'.join(lines[idx:idx + 20])
            if 'SOURCE_REMOVE' not in window and 'SOURCE_CONTINUE' not in window:
                missing.append(f"{rel}:{idx + 1}")
            if len(missing) >= 3:
                break  # cap the noise at three findings
        if len(missing) >= 3:
            break
    if missing:
        result("WARN", "lifecycle/timeout-return-value",
               f"timeout_add/idle_add callback missing SOURCE_REMOVE/SOURCE_CONTINUE: {', '.join(missing)}")
    else:
        result("PASS", "lifecycle/timeout-return-value",
               "All timeout/idle callbacks return SOURCE_REMOVE or SOURCE_CONTINUE")
def check_keybinding_cleanup(ext_dir):
    """R-LIFE-09: addKeybinding must have a matching removeKeybinding."""
    files = find_js_files(ext_dir)
    if not files:
        return
    adds = removes = 0
    for path in files:
        code = strip_comments(read_file(path))
        adds += len(re.findall(r'\.addKeybinding\s*\(', code))
        removes += len(re.findall(r'\.removeKeybinding\s*\(', code))
    if not adds:
        return  # no keybindings — skip silently
    if removes:
        result("PASS", "lifecycle/keybinding-cleanup",
               f"Keybinding balance OK ({adds} add, {removes} remove)")
    else:
        result("FAIL", "lifecycle/keybinding-cleanup",
               f"{adds} addKeybinding() call(s) but no removeKeybinding() — "
               f"keybindings will leak after disable()")
def check_dbus_proxy_lifecycle(ext_dir):
    """R-LIFE-07: DBus proxy creation should have a matching disconnect."""
    files = find_js_files(ext_dir, exclude_prefs=True)
    if not files:
        return
    proxy_pats = (r'Gio\.DBusProxy\.new_for_bus', r'new\s+Gio\.DBusProxy', r'makeProxyWrapper')
    proxy_seen = False
    disconnect_seen = False
    for path in files:
        code = strip_comments(read_file(path))
        if any(re.search(p, code) for p in proxy_pats):
            proxy_seen = True
        if re.search(r'disconnectObject', code) or re.search(r'\.disconnect\s*\(', code):
            disconnect_seen = True
    if not proxy_seen:
        return  # no proxies — skip silently
    if disconnect_seen:
        result("PASS", "lifecycle/dbus-proxy-cleanup",
               "DBus proxy with disconnect pattern detected")
    else:
        result("WARN", "lifecycle/dbus-proxy-cleanup",
               "DBus proxy created but no disconnect/disconnectObject found — "
               "signals may leak after disable()")
def check_file_monitor_lifecycle(ext_dir):
    """R-LIFE-08: File monitors should be cancelled in disable()."""
    files = find_js_files(ext_dir, exclude_prefs=True)
    if not files:
        return
    monitor_pats = (r'\.monitor_file\s*\(', r'\.monitor_directory\s*\(', r'\.monitor_children\s*\(')
    monitor_seen = cancel_seen = False
    for path in files:
        code = strip_comments(read_file(path))
        if any(re.search(p, code) for p in monitor_pats):
            monitor_seen = True
        if re.search(r'\.cancel\s*\(', code):
            cancel_seen = True
    if not monitor_seen:
        return  # no monitors — skip silently
    if cancel_seen:
        result("PASS", "lifecycle/file-monitor-cleanup",
               "File monitor with cancel pattern detected")
    else:
        result("WARN", "lifecycle/file-monitor-cleanup",
               "File monitor created but no .cancel() found — "
               "monitor will continue after disable()")
def check_injection_manager(ext_dir):
    """R-LIFE-10: InjectionManager must be cleared in disable().
    Also detects direct prototype overrides (WS1-D enhancement).

    Two passes over the non-prefs JS files: first a whole-extension
    InjectionManager/.clear() pairing check, then a per-file scan for
    direct prototype modifications that disable() does not restore.
    """
    js_files = find_js_files(ext_dir, exclude_prefs=True)
    if not js_files:
        return
    has_injection = False
    has_clear = False
    for filepath in js_files:
        content = strip_comments(read_file(filepath))
        if re.search(r'new\s+InjectionManager\s*\(', content):
            has_injection = True
        # NOTE(review): any .clear() call counts, even on an unrelated
        # object (e.g. a Map) — may under-report; confirm acceptable.
        if re.search(r'\.clear\s*\(', content):
            has_clear = True
    if has_injection and not has_clear:
        result("FAIL", "lifecycle/injection-cleanup",
               "new InjectionManager() found but no .clear() call — "
               "injections will persist after disable()")
    elif has_injection and has_clear:
        result("PASS", "lifecycle/injection-cleanup",
               "InjectionManager with .clear() cleanup detected")
    # WS1-D: Detect direct prototype overrides
    prototype_overrides = []
    seen_overrides = set()
    for filepath in js_files:
        content = strip_comments(read_file(filepath))
        rel = os.path.relpath(filepath, ext_dir)
        # SomeClass.prototype.methodName = ...
        for m in re.finditer(r'(\w+\.prototype\.\w+)\s*=', content):
            key = (rel, m.group(1))
            # Deduplicate per (file, override) so each is reported once.
            if key not in seen_overrides:
                seen_overrides.add(key)
                prototype_overrides.append(key)
        # Object.assign(SomeClass.prototype, ...)
        for m in re.finditer(r'Object\.assign\s*\(\s*(\w+\.prototype)', content):
            label = f"Object.assign({m.group(1)}, ...)"
            key = (rel, label)
            if key not in seen_overrides:
                seen_overrides.add(key)
                prototype_overrides.append(key)
    if prototype_overrides:
        # Check if disable() restores prototypes
        ext_js = os.path.join(ext_dir, 'extension.js')
        disable_restores = False
        if os.path.isfile(ext_js):
            ext_content = strip_comments(read_file(ext_js))
            disable_match = re.search(r'\bdisable\s*\(\s*\)\s*\{', ext_content)
            if disable_match:
                # Brace-depth scan from the opening '{' of disable() to its
                # matching close (braces in strings would skew the count —
                # accepted limitation of this heuristic).
                start = disable_match.end()
                depth = 1
                pos = start
                while pos < len(ext_content) and depth > 0:
                    if ext_content[pos] == '{':
                        depth += 1
                    elif ext_content[pos] == '}':
                        depth -= 1
                    pos += 1
                disable_body = ext_content[start:pos]
                # Check for prototype restoration in disable
                # NOTE(review): this is the same pattern as the override
                # scan, so ANY prototype assignment in disable() counts as
                # a restore — confirm this is the intended leniency.
                if re.search(r'\w+\.prototype\.\w+\s*=', disable_body):
                    disable_restores = True
        if not disable_restores:
            for rel, override in prototype_overrides:
                result("WARN", "lifecycle/prototype-override",
                       f"{rel}: {override} — direct prototype modification "
                       f"should be restored in disable()")
def check_selective_disable(ext_dir):
    """R-LIFE-13: Detect conditional returns in disable() that skip cleanup.

    Flags `if (...) return;` inside disable() unless the condition is a
    plain single-resource null guard (`if (!this._x) return;`).  At most
    one finding is reported.
    """
    ext_js = os.path.join(ext_dir, 'extension.js')
    if not os.path.isfile(ext_js):
        return
    content = strip_comments(read_file(ext_js))
    # Extract disable() body using brace depth
    disable_match = re.search(r'\bdisable\s*\(\s*\)\s*\{', content)
    if not disable_match:
        return
    start = disable_match.end()
    depth = 1
    pos = start
    # Scan forward counting braces until the method's closing '}' (depth 0).
    # NOTE(review): braces inside string literals would skew the depth —
    # accepted limitation of this heuristic.
    while pos < len(content) and depth > 0:
        if content[pos] == '{':
            depth += 1
        elif content[pos] == '}':
            depth -= 1
        pos += 1
    disable_body = content[start:pos]
    # Look for early returns that skip cleanup: `if (...) return;`
    # But exclude legitimate null guards like `if (this._x) { this._x.destroy(); }`
    # and `if (!this._x) return;` (null guard for a single resource)
    early_return_patterns = re.finditer(
        r'if\s*\(([^)]+)\)\s*return\s*;', disable_body
    )
    for m in early_return_patterns:
        condition = m.group(1).strip()
        # Exclude null guards: `if (!this._x)` — these protect a single destroy
        if re.match(r'^!\s*this\._\w+$', condition):
            continue
        # Flag session mode / enabled state checks that skip all cleanup
        result("FAIL", "lifecycle/selective-disable",
               f"disable() has conditional return: 'if ({condition}) return;' — "
               f"disable() must always clean up all resources regardless of state")
        return  # Report once
    result("PASS", "lifecycle/selective-disable",
           "disable() does not conditionally skip cleanup")
def check_unlock_dialog_comment(ext_dir):
    """R-LIFE-14: unlock-dialog session mode should have explanatory comment in disable().

    Applies only when metadata.json declares 'unlock-dialog' in its
    session-modes; looks inside the raw (comments preserved) disable()
    body for a `//` comment mentioning unlock/lock/session/mode.
    """
    metadata_path = os.path.join(ext_dir, 'metadata.json')
    if not os.path.isfile(metadata_path):
        return
    try:
        with open(metadata_path, encoding='utf-8') as f:
            metadata = json.load(f)
    except (json.JSONDecodeError, OSError):
        # Unreadable/invalid metadata is out of scope for this check.
        return
    session_modes = metadata.get('session-modes', [])
    if 'unlock-dialog' not in session_modes:
        return  # Not relevant
    ext_js = os.path.join(ext_dir, 'extension.js')
    if not os.path.isfile(ext_js):
        return
    # Read raw content (not stripped) to preserve comments
    raw_content = read_file(ext_js)
    # Extract disable() body from raw content
    disable_match = re.search(r'\bdisable\s*\(\s*\)\s*\{', raw_content)
    if not disable_match:
        return
    start = disable_match.end()
    depth = 1
    pos = start
    # Brace-depth scan to find the matching closing brace of disable().
    while pos < len(raw_content) and depth > 0:
        if raw_content[pos] == '{':
            depth += 1
        elif raw_content[pos] == '}':
            depth -= 1
        pos += 1
    disable_body = raw_content[start:pos]
    # Look for comments mentioning unlock/lock/session/mode
    comment_keywords = re.search(
        r'//.*\b(unlock|lock|session|mode)\b', disable_body, re.IGNORECASE
    )
    if not comment_keywords:
        result("WARN", "lifecycle/unlock-dialog-comment",
               "extension declares 'unlock-dialog' session mode but disable() has no "
               "comment explaining lock screen behavior — add a comment documenting "
               "which resources need special handling on the lock screen")
    else:
        result("PASS", "lifecycle/unlock-dialog-comment",
               "disable() has comment documenting lock screen behavior")
def check_clipboard_keybinding(ext_dir):
    """R-SEC-16: Clipboard access combined with keybinding registration is suspicious."""
    files = find_js_files(ext_dir, exclude_prefs=True)
    if not files:
        return
    for path in files:
        code = strip_comments(read_file(path))
        if re.search(r'St\.Clipboard', code) and re.search(r'addKeybinding', code):
            rel = os.path.relpath(path, ext_dir)
            result("WARN", "lifecycle/clipboard-keybinding",
                   f"{rel}: St.Clipboard and addKeybinding() in same file — "
                   f"review whether keybinding-triggered clipboard access is intended "
                   f"and not a keylogger pattern")
            return  # flag only the first offending file
    # No co-occurrence found — skip silently.
def check_lockscreen_signals(ext_dir):
    """R-LIFE-11: Lock screen signal safety — keyboard signals with unlock-dialog mode."""
    meta_path = os.path.join(ext_dir, 'metadata.json')
    if not os.path.isfile(meta_path):
        return
    try:
        with open(meta_path, encoding='utf-8') as f:
            meta = json.load(f)
    except (json.JSONDecodeError, OSError):
        return
    if 'unlock-dialog' not in meta.get('session-modes', []):
        return  # not relevant if extension doesn't run on lock screen
    kb_signals = ('key-press-event', 'key-release-event', 'captured-event')
    for path in find_js_files(ext_dir, exclude_prefs=True):
        code = strip_comments(read_file(path))
        if not any(sig in code for sig in kb_signals):
            continue
        rel = os.path.relpath(path, ext_dir)
        # Any mention of a session-mode check counts as a guard.
        guarded = bool(re.search(
            r'(currentMode|sessionMode|unlock-dialog|session-modes)', code))
        if guarded:
            result("PASS", "lifecycle/lockscreen-signals",
                   f"{rel}: keyboard signal with session mode guard detected")
        else:
            result("FAIL", "lifecycle/lockscreen-signals",
                   f"{rel}: keyboard signal connected but session-modes includes "
                   f"'unlock-dialog' — must disconnect or guard keyboard signals on lock screen")
        return  # report at most one file
    # unlock-dialog declared but no keyboard signals — nothing to report.
def check_timeout_removal_in_disable(ext_dir):
    """R-LIFE-12: Stored timeout IDs should have Source.remove() in disable().

    Collects every `this._x = ...timeout_add/idle_add/...` assignment in
    extension.js, then verifies a matching removal call exists in the
    disable()/destroy() bodies (or anywhere in the file, for helper-based
    cleanup).
    """
    ext_js = os.path.join(ext_dir, 'extension.js')
    if not os.path.isfile(ext_js):
        return
    content = strip_comments(read_file(ext_js))
    # Find stored timeout IDs: this._foo = ...timeout_add/idle_add/setTimeout/setInterval...
    stored_ids = set()
    for m in re.finditer(r'this\.(_\w+)\s*=\s*.*?(timeout_add|idle_add|setTimeout|setInterval)', content):
        stored_ids.add(m.group(1))
    if not stored_ids:
        return  # No stored timeouts to check
    # Extract disable() and destroy() bodies (cleanup may be in either)
    cleanup_body = ''
    for method_pat in [r'\bdisable\s*\(\s*\)\s*\{', r'\bdestroy\s*\(\s*\)\s*\{']:
        for m_method in re.finditer(method_pat, content):
            start = m_method.end()
            depth = 1
            pos = start
            # Brace-depth scan to the method's matching closing brace.
            while pos < len(content) and depth > 0:
                if content[pos] == '{':
                    depth += 1
                elif content[pos] == '}':
                    depth -= 1
                pos += 1
            cleanup_body += content[start:pos] + '\n'
    if not cleanup_body:
        return  # check_enable_disable handles missing disable()
    # Check if Source.remove/clearTimeout/clearInterval is called in cleanup methods
    has_remove = bool(re.search(
        r'(Source\.remove|source_remove|clearTimeout|clearInterval)\s*\(', cleanup_body))
    missing = []
    for var_name in stored_ids:
        # Check cleanup methods first, then fall back to file-wide search
        # (cleanup may be in a helper method called from disable/destroy)
        var_removed = bool(re.search(
            rf'(Source\.remove|source_remove|clearTimeout|clearInterval)\s*\(\s*this\.{re.escape(var_name)}',
            cleanup_body
        ))
        if not var_removed:
            # Also check the entire file — helper methods may clear timers
            var_removed = bool(re.search(
                rf'(Source\.remove|source_remove|clearTimeout|clearInterval)\s*\(\s*this\.{re.escape(var_name)}',
                content
            ))
        # NOTE(review): any generic removal call in cleanup (has_remove)
        # suppresses ALL per-variable findings — presumably to tolerate
        # loop-based cleanup; confirm this leniency is intended.
        if not var_removed and not has_remove:
            missing.append(var_name)
    if missing:
        for var_name in sorted(missing):
            result("FAIL", "lifecycle/timeout-not-removed",
                   f"this.{var_name} stores timeout/idle source but no "
                   f"GLib.Source.remove() call found in disable()")
    else:
        result("PASS", "lifecycle/timeout-not-removed",
               "All stored timeout/idle IDs have Source.remove() in disable()")
def check_pkexec_user_writable(ext_dir):
    """R-SEC-18: pkexec target must not be user-writable."""
    files = find_js_files(ext_dir)
    if not files:
        return
    writable_prefixes = ('/home/', '/tmp/', './', '../')
    argv_pat = re.compile(r"""pkexec['"]\s*,\s*['"]([^'"]+)['"]""")
    cmd_pat = re.compile(r"""['"]pkexec\s+([^'"]+)['"]""")
    for path in files:
        code = strip_comments(read_file(path))
        rel = os.path.relpath(path, ext_dir)
        # argv-array style: ['pkexec', '/path/to/script']
        for m in argv_pat.finditer(code):
            target = m.group(1)
            if target.startswith(writable_prefixes):
                result("FAIL", "lifecycle/pkexec-user-writable",
                       f"{rel}: pkexec target '{target}' is user-writable — "
                       f"attacker can replace it with arbitrary code")
                return
        # command-string style: 'pkexec /path/to/script'
        for m in cmd_pat.finditer(code):
            target = m.group(1).split()[0]
            if target.startswith(writable_prefixes):
                result("FAIL", "lifecycle/pkexec-user-writable",
                       f"{rel}: pkexec target '{target}' is user-writable — "
                       f"attacker can replace it with arbitrary code")
                return
def check_destroy_then_null(ext_dir):
    """GAP-004: destroy() calls should be followed by a null assignment."""
    files = find_js_files(ext_dir, exclude_prefs=True)
    if not files:
        return
    destroy_pat = re.compile(r'(this\._\w+)\??\.\bdestroy\s*\(')
    findings = []
    for path in files:
        rel = os.path.relpath(path, ext_dir)
        lines = read_file(path).splitlines()
        for idx, raw in enumerate(lines):
            text = raw.strip()
            if text.startswith(('//', '*')):
                continue  # comment line
            m = destroy_pat.search(text)
            if m is None:
                continue
            # The matching `this._xxx = null` must appear within 5 lines.
            window = '\n'.join(lines[idx:idx + 6])
            if not re.search(re.escape(m.group(1)) + r'\s*=\s*null\b', window):
                findings.append(f"{rel}:{idx + 1}")
            if len(findings) >= 5:
                break  # cap the noise at five findings
        if len(findings) >= 5:
            break
    if findings:
        for loc in findings:
            result("WARN", "lifecycle/destroy-no-null",
                   f"{loc}: .destroy() without null assignment — "
                   f"set reference to null after destroy to prevent stale access")
    else:
        result("PASS", "lifecycle/destroy-no-null",
               "All destroy() calls followed by null assignment")
def check_dbus_export_lifecycle(ext_dir):
    """GAP-003: DBus exported interfaces must be unexported in disable()/destroy().

    Scans non-prefs JS for Gio DBus export method calls (.export(),
    .export_action_group(), .export_menu_model()) and FAILs (reported once)
    when no matching unexport method call exists anywhere in the extension.

    Fix over previous version: the unexport detection contained an
    always-truthy conjunct (`unexport_method.replace(...) and ...`) that was
    dead code; the unexport regexes are now built directly from the method
    names.
    """
    js_files = find_js_files(ext_dir, exclude_prefs=True)
    if not js_files:
        return
    # (export-call regex, matching unexport method name)
    export_patterns = [
        (r'\.export\s*\(', 'unexport'),
        (r'\.export_action_group\s*\(', 'unexport_action_group'),
        (r'\.export_menu_model\s*\(', 'unexport_menu_model'),
    ]
    exports_found = []
    all_content = ''
    for filepath in js_files:
        content = strip_comments(read_file(filepath))
        all_content += content
        rel = os.path.relpath(filepath, ext_dir)
        for export_pat, _ in export_patterns:
            for m in re.finditer(export_pat, content):
                # Only count true method calls: the '.' must follow an
                # identifier/paren, not whitespace or a statement boundary
                # (guards against the ESM 'export' keyword and odd formatting).
                start = m.start()
                if start > 0 and content[start - 1] not in (' ', '\t', '\n', ';', '{'):
                    exports_found.append((rel, export_pat))
                    break  # one finding per pattern per file
    if not exports_found:
        return  # No DBus exports found
    # Any matching unexport method call anywhere in the code satisfies the check.
    has_unexport = any(
        re.search(rf'\.{unexport_method}\s*\(', all_content)
        for _, unexport_method in export_patterns
    )
    if has_unexport:
        result("PASS", "lifecycle/dbus-export-leak",
               "DBus export/unexport lifecycle OK")
    else:
        for rel, _ in exports_found:
            result("FAIL", "lifecycle/dbus-export-leak",
                   f"{rel}: DBus interface exported but no .unexport() found — "
                   f"exported interfaces must be unexported in disable()")
            return  # Report once
def check_timeout_reassignment(ext_dir):
    """GAP-010: Timeout ID reassignment without prior Source.remove() leaks GLib sources."""
    files = find_js_files(ext_dir, exclude_prefs=True)
    if not files:
        return
    timer_assign = r'\s*=\s*.*?(timeout_add|idle_add|setTimeout|setInterval)\s*\('
    findings = []
    for path in files:
        rel = os.path.relpath(path, ext_dir)
        code = strip_comments(read_file(path))
        lines = code.splitlines()
        for idx, row in enumerate(lines):
            m = re.search(r'(this\._\w+)' + timer_assign, row)
            if m is None:
                continue
            prop = m.group(1)
            # Only a property assigned a timer in 2+ places is a
            # reassignment pattern worth flagging.
            if len(re.findall(re.escape(prop) + timer_assign, code)) < 2:
                continue
            # A removal in the 5 preceding lines makes the reassignment safe.
            recent = '\n'.join(lines[max(0, idx - 5):idx])
            removal = (r'(Source\.remove|source_remove|clearTimeout|clearInterval)'
                       r'\s*\(\s*' + re.escape(prop))
            if not re.search(removal, recent):
                findings.append(f"{rel}:{idx + 1}")
            break  # at most one finding per file
    if findings:
        for loc in findings:
            result("WARN", "lifecycle/timeout-reassignment",
                   f"{loc}: timeout/idle ID reassigned without prior "
                   f"GLib.Source.remove() — may leak GLib sources")
    else:
        result("PASS", "lifecycle/timeout-reassignment",
               "No timeout ID reassignment without removal detected")
def check_subprocess_cancellation(ext_dir):
    """GAP-012: Gio.Subprocess should have cancellation in disable()/destroy()."""
    files = find_js_files(ext_dir, exclude_prefs=True)
    if not files:
        return
    spawn_pats = (r'new\s+Gio\.Subprocess', r'Gio\.Subprocess\.new', r'Gio\.SubprocessLauncher')
    spawn_seen = cancel_seen = False
    for path in files:
        code = strip_comments(read_file(path))
        if any(re.search(p, code) for p in spawn_pats):
            spawn_seen = True
        if (re.search(r'\.force_exit\s*\(', code) or
                re.search(r'\.send_signal\s*\(', code) or
                re.search(r'cancellable.*\.cancel\s*\(', code, re.IGNORECASE)):
            cancel_seen = True
    if not spawn_seen:
        return  # no subprocesses — skip silently
    if cancel_seen:
        result("PASS", "lifecycle/subprocess-no-cancel",
               "Subprocess with cancellation pattern detected")
    else:
        result("WARN", "lifecycle/subprocess-no-cancel",
               "Gio.Subprocess created but no .force_exit(), .send_signal(), or "
               "cancellable.cancel() found — subprocess may outlive disable()")
def check_clipboard_network(ext_dir):
    """GAP-025: Clipboard + network access cross-reference.

    Touching the clipboard while also having network access is a classic
    data-exfiltration shape, so the mere co-occurrence of both API families
    anywhere in the extension triggers a single WARN requesting manual
    review.  If either side is absent the check is silent.
    """
    js_files = find_js_files(ext_dir, exclude_prefs=True)
    if not js_files:
        return
    clipboard_re = re.compile(r'St\.Clipboard')
    network_re = re.compile(r'Soup\.Session|Gio\.SocketClient|\bfetch\s*\(')
    uses_clipboard = uses_network = False
    for path in js_files:
        text = strip_comments(read_file(path))
        uses_clipboard = uses_clipboard or bool(clipboard_re.search(text))
        uses_network = uses_network or bool(network_re.search(text))
        if uses_clipboard and uses_network:
            break  # Both sides already seen; remaining files are irrelevant.
    if uses_clipboard and uses_network:
        result("WARN", "lifecycle/clipboard-network",
               "Extension accesses both St.Clipboard and network APIs — "
               "manual review required to verify clipboard data is not exfiltrated")
    # Skip silently when clipboard and network do not co-occur.
def check_soup_session_abort(ext_dir):
    """R-LIFE-15: Soup.Session should be aborted in disable()/destroy().

    Looks for Soup.Session construction and, separately, for any .abort()
    call across the extension's non-prefs JS files.  Warns when a session
    exists with no abort (pending HTTP requests would survive disable());
    passes when both are present; silent when no session is created.
    """
    js_files = find_js_files(ext_dir, exclude_prefs=True)
    if not js_files:
        return
    session_re = re.compile(r'new\s+Soup\.Session|Soup\.Session\.new')
    abort_re = re.compile(r'\.abort\s*\(')
    creates_session = calls_abort = False
    for path in js_files:
        text = strip_comments(read_file(path))
        creates_session = creates_session or bool(session_re.search(text))
        calls_abort = calls_abort or bool(abort_re.search(text))
        if creates_session and calls_abort:
            break  # Outcome cannot change; stop reading files early.
    if creates_session and not calls_abort:
        result("WARN", "lifecycle/soup-session-abort",
               "Soup.Session created but no .abort() found — "
               "pending requests will continue after disable()")
    elif creates_session and calls_abort:
        result("PASS", "lifecycle/soup-session-abort",
               "Soup.Session with .abort() cleanup detected")
    # No session anywhere: skip silently.
def check_bus_name_lifecycle(ext_dir):
    """R-LIFE-20: D-Bus bus name ownership must be released in disable()/destroy().

    Pairs Gio.bus_own_name()/own_name() acquisition with the matching
    bus_unown_name()/unown_name() release across the extension's JS files.
    Warns when a name is owned but never released; passes when both halves
    of the lifecycle appear; silent when no bus name is owned at all.
    """
    js_files = find_js_files(ext_dir, exclude_prefs=True)
    if not js_files:
        return
    own_re = re.compile(r'\bbus_own_name\b|\.own_name\s*\(')
    unown_re = re.compile(r'\bbus_unown_name\b|\.unown_name\s*\(')
    owns_name = releases_name = False
    for path in js_files:
        text = strip_comments(read_file(path))
        owns_name = owns_name or bool(own_re.search(text))
        releases_name = releases_name or bool(unown_re.search(text))
        if owns_name and releases_name:
            break  # Both halves found; further scanning is pointless.
    if owns_name and not releases_name:
        result("WARN", "lifecycle/bus-name-ownership",
               "Gio.bus_own_name() found but no bus_unown_name() — "
               "bus name will not be released after disable()")
    elif owns_name and releases_name:
        result("PASS", "lifecycle/bus-name-ownership",
               "Bus name own/unown lifecycle OK")
    # No bus-name ownership: skip silently.
def check_widget_lifecycle(ext_dir):
"""Detect widgets created in enable() but not destroyed in disable()."""
ext_file = os.path.join(ext_dir, 'extension.js')
if not os.path.isfile(ext_file):
return
content = read_file(ext_file)
clean = strip_comments(content)
lines = clean.splitlines()
# Find widgets assigned to this._xxx in enable()
widget_re = re.compile(
r'this\.(_\w+)\s*=\s*new\s+'
r'(St\.\w+|PanelMenu\.\w+|PopupMenu\.\w+|Clutter\.\w+)')
destroy_re_template = r'this\.{name}\.(destroy|remove_child|remove_all_children)\s*\('
null_re_template = r'this\.{name}\s*=\s*null'
in_enable = False
in_disable = False
brace_depth = 0
created_widgets = {} # name -> line number
for i, line in enumerate(lines):
stripped = line.strip()
if re.search(r'\benable\s*\(', stripped):
in_enable = True
brace_depth = 0
if re.search(r'\bdisable\s*\(', stripped):
in_disable = True
brace_depth = 0
if in_enable:
brace_depth += stripped.count('{') - stripped.count('}')
m = widget_re.search(stripped)
if m:
created_widgets[m.group(1)] = i + 1
if brace_depth <= 0 and '{' in stripped:
in_enable = False
if in_disable:
brace_depth += stripped.count('{') - stripped.count('}')
if brace_depth <= 0 and '{' in stripped:
in_disable = False
if not created_widgets:
result("PASS", "lifecycle/widget-destroy", "No widgets tracked in enable()")
return
# Check if each widget is destroyed or nulled in disable()
leaked = []
for name, lineno in created_widgets.items():
destroy_pat = destroy_re_template.format(name=re.escape(name))
null_pat = null_re_template.format(name=re.escape(name))
if not re.search(destroy_pat, clean) and not re.search(null_pat, clean):
leaked.append(f"{name}(L{lineno})")
if leaked:
result("WARN", "lifecycle/widget-destroy",
f"Widget(s) created in enable() but not destroyed/nulled in disable(): "
f"{', '.join(leaked[:5])}")
else:
result("PASS", "lifecycle/widget-destroy",