-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathcollimation_overlay.py
More file actions
3122 lines (2723 loc) · 123 KB
/
collimation_overlay.py
File metadata and controls
3122 lines (2723 loc) · 123 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
# =============================================================================
# SharpCap Collimation Overlay Script
# =============================================================================
#
# PURPOSE:
# Draws concentric circle overlays on a defocused star (donut) to aid
# collimation of reflector telescopes. A defocused star in a reflector
# appears as a donut: a bright ring (primary mirror) with a dark center
# (secondary mirror shadow). Perfect collimation = concentric circles.
# This script auto-detects both circles, measures their offset, and
# shows correction guidance in real time on SharpCap's live preview.
#
# ARCHITECTURE:
# The script registers an event handler on SharpCap's BeforeFrameDisplay
# event. This handler fires on every displayed frame, allowing us to:
# 1. Read pixel data via frame.GetFrameBitmap() + LockBits
# 2. Analyze the image for a donut pattern (radial ray casting)
# 3. Fit circles to detected edges (Kasa least-squares method)
# 4. Draw overlays via frame.GetAnnotationGraphics() (frame annotations)
# with fallback to frame.GetDrawableBitmap().GetGraphics()
#
# Performance optimizations (when APIs are available):
# - frame.GetAnnotationGraphics(): avoids bitmap alloc/dispose per frame
# - frame.CalculateHistogram(): gets peak brightness natively (skips pixel loop)
# - frame.CutROI(): analyzes only the donut region when tracking (smaller buffer)
# - frame.Index: native frame counter (avoids manual tracking)
#
# The script itself runs once to set up the handler and toolbar button,
# then exits. The handler remains attached and runs per-frame until
# stop() is called or SharpCap closes.
#
# SHARPCAP API NOTES (4.1+):
# - GetAnnotationGraphics() returns a graphics context for frame annotations
# drawn over the frame without bitmap overhead. Same API as GDI+ wrapper.
# - GetDrawableBitmap().GetGraphics() is the fallback — returns WinformGraphicsImpl.
# Most GDI+ methods work but:
# * FillRectangle requires (brush, RectangleF) not (brush, x, y, w, h)
# * SmoothingMode/DashStyle may not be supported (wrapped in try/except)
# - GetDrawableBitmap().GetBitmap() returns the OVERLAY layer (blank),
# NOT the camera frame. Use frame.GetFrameBitmap() for actual pixels.
# - CalculateHistogram() returns HistogramData with centile accessors.
# - CutROI(x, y, w, h) returns a sub-frame for focused processing.
# Must call .Release() on the returned copy.
# - frame.Index provides a native frame sequence number.
# - AddCustomButton takes 4 args but the arg order varies by version.
# We try 3 orderings and use whichever works.
#
# USAGE:
# 1. Open SharpCap, connect camera, start live preview
# 2. Defocus a bright star until you see a donut shape
# 3. Tools > Scripting Console > File > Run Script > select this file
# 4. The settings palette opens automatically — adjust as needed
# 5. Close the palette (X or Close button) to stop the overlay
#
# Requires: SharpCap Pro 4.1+ (scripting is a Pro feature)
# =============================================================================
import clr
import math
import time
import os as _os
# .NET assemblies for drawing and pixel access
clr.AddReference("System.Drawing")
from System.Drawing import (
Color, Pen, SolidBrush, Font, FontFamily, FontStyle,
Bitmap, Rectangle, RectangleF, PointF, StringFormat,
)
from System.Drawing.Imaging import ImageLockMode, PixelFormat
from System.Drawing.Drawing2D import SmoothingMode, DashStyle
from System.Runtime.InteropServices import Marshal
from System import Array, Byte
import System.Drawing
# .NET WinForms for settings dialog
clr.AddReference("System.Windows.Forms")
import System.Windows.Forms
from System.Windows.Forms import (
Form, TabControl, TabPage, Label, NumericUpDown, TrackBar,
Button, Panel, ColorDialog, CheckBox, ToolTip, DockStyle, Padding,
FormBorderStyle, FormStartPosition, AnchorStyles,
TickStyle, Orientation, MessageBox, MessageBoxButtons,
MessageBoxIcon, DialogResult, AutoScaleMode as WinFormsAutoScaleMode,
)
# Script version string (shown in logs / info panel).
VERSION = "2.0.3"
# =============================================================================
# CONFIGURATION SYSTEM
# =============================================================================
# All settings are stored in _config dict, populated from _DEFAULTS on startup
# and optionally overridden by a saved config file. The settings dialog and
# console commands modify _config directly — changes take effect on the next frame.
# Default values for all configuration options.
# Colors stored as (A, R, G, B) tuples for serialization.
_DEFAULTS = {
    # Overlay display toggles
    "SHOW_CIRCLES": True,               # Inner + outer detection circles
    "SHOW_INFO_PANEL": True,            # Detailed stats panel (top-left)
    "SHOW_BOTTOM_BAR": True,            # X/Y offset readout bar (bottom)
    "SHOW_CORRECTION_ARROWS": True,     # Directional correction arrows + labels
    "SHOW_ALIGNMENT_CROSSHAIRS": False, # Full-diameter crosshairs for visual alignment
    "SHOW_BRIGHTNESS_SCALE": True,      # Brightness scale bar above bottom bar
    "SHOW_TARGET_SIZE": True,           # Vertical donut-size guide on right edge
    # Overlay colors (ARGB)
    "OUTER_CIRCLE_COLOR": (220, 255, 60, 60),  # Red - primary mirror edge
    "INNER_CIRCLE_COLOR": (220, 60, 255, 60),  # Green - secondary shadow edge
    "CROSSHAIR_COLOR": (180, 255, 255, 0),     # Yellow - outer center marker
    "OFFSET_LINE_COLOR": (220, 0, 220, 255),   # Cyan - offset vector & arrows
    "TEXT_COLOR": (240, 255, 255, 255),        # White - info panel text
    "TEXT_BG_COLOR": (160, 0, 0, 0),           # Semi-transparent black background
    # Line widths (pixels) — kept thin for viewing at 200-300% magnification
    "CIRCLE_PEN_WIDTH": 1.0,
    "CROSSHAIR_PEN_WIDTH": 1.0,
    "OFFSET_PEN_WIDTH": 1.0,
    # Crosshair size (pixels extending from center, used in modes 0-2)
    "CROSSHAIR_LENGTH": 20,
    # Text sizes (points)
    "TEXT_FONT_SIZE": 9,
    "BOTTOM_BAR_FONT_SIZE": 10,
    "ARROW_LABEL_FONT_SIZE": 8,
    # Detection parameters
    "NUM_RAYS": 90,                     # Radial rays for edge detection
    "BRIGHTNESS_THRESHOLD_PERCENT": 30, # % of peak brightness defining donut edge
    "CENTROID_DOWNSAMPLE": 4,           # Sample every Nth pixel for centroid
    "MIN_DONUT_RADIUS": 15,             # Ignore circles smaller than this (px)
    "RAY_STEP": 1,                      # Pixel step along each ray
    "ROI_MARGIN_FACTOR": 2.0,           # CutROI margin as multiple of outer radius
    # Tracking validation
    "MAX_RADIUS_CHANGE_PCT": 15,        # Max % change in outer radius per detection
    "MAX_CENTER_JUMP_PCT": 20,          # Max center jump as % of outer radius
    "STALE_FRAME_LIMIT": 15,            # Consecutive rejections before re-detect
    # Smoothing
    "SMOOTHING_FACTOR": 0.6,            # EMA factor (0.0 = none, 0.9 = heavy)
    # Performance
    "ANALYSIS_INTERVAL": 2,             # Analyze every Nth frame
    # Outer edge detection algorithm
    "DETECTION_ALGORITHM": "threshold_crossing",  # "threshold_crossing", "edge_to_dark", "steepest_gradient"
    "GRAD_WINDOW": 10,                  # Pixels to average for gradient calculation
}
# Color keys are identified by suffix for serialization
_COLOR_KEYS = frozenset(k for k in _DEFAULTS if k.endswith("_COLOR"))
# Float keys (non-integer numeric)
_FLOAT_KEYS = frozenset(["CIRCLE_PEN_WIDTH", "CROSSHAIR_PEN_WIDTH", "OFFSET_PEN_WIDTH",
                         "ROI_MARGIN_FACTOR", "SMOOTHING_FACTOR"])
# Bool keys (on/off toggles)
_BOOL_KEYS = frozenset(k for k in _DEFAULTS if isinstance(_DEFAULTS[k], bool))
# String keys (enum-like text values)
_STRING_KEYS = frozenset(["DETECTION_ALGORITHM"])
# Live config dict — read by the frame handler on every frame
_config = {}
def _tuple_to_color(t):
    """Build a System.Drawing.Color from an (A, R, G, B) tuple."""
    a, r, g, b = t[0], t[1], t[2], t[3]
    return Color.FromArgb(int(a), int(r), int(g), int(b))
def _color_to_tuple(c):
"""Convert System.Drawing.Color to (A, R, G, B) tuple."""
return (c.A, c.R, c.G, c.B)
def _init_config():
    """Reset _config to the built-in defaults, converting color tuples
    into System.Drawing.Color objects on the way in."""
    for key, default in _DEFAULTS.items():
        _config[key] = _tuple_to_color(default) if key in _COLOR_KEYS else default
def _get_config_path():
"""Return the path to the settings file (next to the script)."""
try:
script_dir = _os.path.dirname(_os.path.abspath(__file__))
except NameError:
script_dir = "."
return _os.path.join(script_dir, "collimation_settings.cfg")
def save_config():
    """Serialize the live _config to the settings file.

    Writes one ``KEY = value`` line per setting (sorted by key) in the
    exact text forms that load_config() parses back. Errors are reported
    to the console rather than raised, so a failed save never kills the
    overlay.
    """
    path = _get_config_path()

    def _fmt(key, val):
        # Render one setting in the same text form load_config() expects.
        if key in _COLOR_KEYS:
            return "%s = %d,%d,%d,%d" % ((key,) + _color_to_tuple(val))
        if key in _BOOL_KEYS:
            return "%s = %s" % (key, "true" if val else "false")
        if key in _STRING_KEYS:
            return "%s = %s" % (key, val)
        if key in _FLOAT_KEYS:
            return "%s = %.2f" % (key, val)
        return "%s = %s" % (key, val)

    try:
        body = ["# Collimation Overlay Settings",
                "# Edit with care or use the Settings dialog",
                ""]
        body.extend(_fmt(key, _config[key]) for key in sorted(_DEFAULTS)
                    if _config.get(key) is not None)
        with open(path, "w") as fh:
            fh.write("\n".join(body) + "\n")
        print("[Collimation] Settings saved to: %s" % path)
    except Exception as ex:
        print("[Collimation] Error saving settings: %s" % str(ex))
def load_config():
    """Load settings from the config file, overlaying them onto _config.

    File format: ``KEY = value`` lines as written by save_config().
    A missing file is a no-op. Unknown keys and malformed values are
    skipped so that a hand-edited or partially corrupt file never
    prevents startup — the built-in default is kept for anything that
    fails to parse.
    """
    path = _get_config_path()
    if not _os.path.exists(path):
        return
    try:
        with open(path, "r") as f:
            for line in f:
                line = line.strip()
                # Skip blank lines and comments.
                if not line or line.startswith("#"):
                    continue
                if "=" not in line:
                    continue
                key, _, val = line.partition("=")
                key = key.strip()
                val = val.strip()
                if key not in _DEFAULTS:
                    continue  # Unknown/obsolete key — ignore
                try:
                    if key in _COLOR_KEYS:
                        parts = [int(x.strip()) for x in val.split(",")]
                        if len(parts) == 4:
                            _config[key] = Color.FromArgb(parts[0], parts[1], parts[2], parts[3])
                    elif key in _BOOL_KEYS:
                        _config[key] = val.lower() in ("true", "1", "yes")
                    elif key in _STRING_KEYS:
                        _config[key] = val
                    elif key in _FLOAT_KEYS:
                        _config[key] = float(val)
                    else:
                        _config[key] = int(val)
                except Exception:
                    # Malformed value: keep the default. Catch Exception
                    # (not a bare except) so KeyboardInterrupt/SystemExit
                    # still propagate; .NET exceptions raised by
                    # Color.FromArgb are still caught under IronPython.
                    pass
        print("[Collimation] Settings loaded from: %s" % path)
    except Exception as ex:
        print("[Collimation] Error loading settings: %s" % str(ex))
def reset_config():
    """Reset all settings to built-in defaults and delete the config file.

    The in-memory _config is restored first, so even if the file cannot
    be removed the running overlay uses defaults. File removal is
    best-effort: only OS-level errors (permissions, file in use) are
    suppressed — previously a bare ``except:`` swallowed everything,
    including KeyboardInterrupt.
    """
    _init_config()
    path = _get_config_path()
    try:
        if _os.path.exists(path):
            _os.remove(path)
    except OSError:
        pass  # Best-effort delete — defaults are already active in memory
    print("[Collimation] Settings reset to defaults")
# Initialize config from defaults, then overlay any values found in the
# saved settings file (load_config is a no-op when the file is absent).
_init_config()
load_config()
# =============================================================================
# INTERNAL STATE
# =============================================================================
# If the script is re-run, clean up the previous instance's event handler,
# toolbar buttons, and settings form to prevent duplicates.
# NOTE: this deliberately reads the module-global _state from the PREVIOUS
# run; on a first run the name is undefined and the NameError is caught below.
try:
    _prev_state = _state
    if _prev_state.get("handler_attached"):
        try:
            cam = SharpCap.SelectedCamera
            if cam is not None:
                # Detach the old per-frame handler so it doesn't fire twice.
                cam.BeforeFrameDisplay -= on_before_frame_display
        except:
            pass
    # Close settings form if open
    sf = _prev_state.get("settings_form")
    if sf is not None:
        try:
            sf.Close()
            sf.Dispose()
        except:
            pass
    print("[Collimation] Cleaned up previous instance")
except NameError:
    pass  # First run, no previous _state exists
# Global state dictionary. Using a dict (mutable) so closures/handlers can
# modify values without needing the 'global' keyword (IronPython quirk).
_state = {
    "enabled": True,
    "outer_cx": None, "outer_cy": None, "outer_r": None,  # Smoothed outer circle
    "inner_cx": None, "inner_cy": None, "inner_r": None,  # Smoothed inner circle
    "frame_count": 0,
    "handler_attached": False,
    "status": "Searching for donut...",
    "last_error": "",
    "ref_peak": None,        # Reference peak brightness from initial detection
    "ref_outer_r": None,     # Reference outer radius from initial detection (for ROI sizing)
    "reject_count": 0,       # Consecutive rejected detection count
    "debug_log": False,      # Toggle with debug_on() / debug_off()
    "debug_frames": 0,       # Frames logged since debug enabled
    "settings_form": None,   # Reference to open SettingsForm
    "restart_blanking": 0,   # Frames remaining to show blank overlay after restart
    "brightness_pct": None,  # Current brightness as % of max (0-100), None if unknown
}
# =============================================================================
# CIRCLE FITTING - Kasa algebraic method (least squares)
# =============================================================================
# Given a set of (x, y) edge points, fits the best circle (cx, cy, r).
# This is a non-iterative algebraic method that solves a linear system.
# Reference: I. Kasa, "A curve fitting procedure and its error analysis",
# IEEE Trans. Inst. Meas., 1976.
def fit_circle(points):
    """
    Least-squares circle fit using the Kasa algebraic method.

    points: sequence of (x, y) edge coordinates.
    Returns (center_x, center_y, radius), or None when fewer than 5
    points are supplied, the points are degenerate (collinear), or the
    fitted radius is implausibly small versus MIN_DONUT_RADIUS.
    """
    count = len(points)
    if count < 5:
        return None
    # Moment sums required by the Kasa normal equations.
    m10 = m01 = m20 = m02 = m11 = m30 = m03 = m21 = m12 = 0.0
    for x, y in points:
        xx = x * x
        yy = y * y
        m10 += x
        m01 += y
        m20 += xx
        m02 += yy
        m11 += x * y
        m30 += xx * x
        m03 += yy * y
        m21 += xx * y
        m12 += x * yy
    # Solve the 2x2 system A * [cx, cy]^T = b for the circle centre.
    a11 = count * m20 - m10 * m10
    a12 = count * m11 - m10 * m01
    a22 = count * m02 - m01 * m01
    b1 = 0.5 * (count * (m30 + m12) - m10 * (m20 + m02))
    b2 = 0.5 * (count * (m21 + m03) - m01 * (m20 + m02))
    det = a11 * a22 - a12 * a12
    if abs(det) < 1e-10:
        return None  # Collinear points: no unique circle
    cx = (b1 * a22 - a12 * b2) / det
    cy = (a11 * b2 - b1 * a12) / det
    # Radius = RMS distance of the points from the fitted centre.
    acc = 0.0
    for x, y in points:
        dx = x - cx
        dy = y - cy
        acc += dx * dx + dy * dy
    radius = math.sqrt(acc / count) if acc > 0 else 0
    if radius < _config["MIN_DONUT_RADIUS"] * 0.5:
        return None
    return (cx, cy, radius)
def reject_outliers_and_refit(points, cx, cy, radius):
    """
    Discard edge points far from the fitted circle, then refit.

    Up to three passes run with a progressively tighter distance band
    (2.0x, 1.5x, then 1.2x of the base tolerance), which handles fits
    initially skewed by many outliers — e.g. diffraction rings causing a
    70-80px spread on a 90px-radius circle. Returns the refined
    (cx, cy, radius); the inputs come back unchanged when fewer than 10
    points are available.
    """
    if len(points) < 10:
        return cx, cy, radius
    fit_cx, fit_cy, fit_r = cx, cy, radius
    for multiplier in (2.0, 1.5, 1.2):
        band = (fit_r * 0.10 + 3) * multiplier
        kept = [(x, y) for x, y in points
                if abs(math.sqrt((x - fit_cx) ** 2 + (y - fit_cy) ** 2) - fit_r) < band]
        if len(kept) < 8:
            break  # Too many rejected at this tolerance — keep the last fit
        refit = fit_circle(kept)
        if refit is None:
            break
        fit_cx, fit_cy, fit_r = refit
    return fit_cx, fit_cy, fit_r
# =============================================================================
# IMAGE ANALYSIS
# =============================================================================
# The donut detection algorithm:
# 1. Find the approximate center using brightness-weighted centroid
# 2. Refine the center using only bright pixels near the initial centroid
# 3. Cast NUM_RAYS radial rays outward from center. Along each ray, detect:
# - Inner edge: steepest dark-to-bright gradient (secondary shadow boundary)
# - Outer edge: steepest bright-to-dark transition after that (primary mirror boundary)
# 4. Fit circles to the inner and outer edge point sets
# 5. Reject outliers and refit for higher accuracy
# 6. Validate: inner radius < outer radius, centers not too far apart, etc.
def _get_brightness(pixels, offset, bpp, bpc=1):
"""Extract grayscale brightness from raw byte array at given byte offset.
For RGB (bpp>=3), averages the three channels. For mono, returns the byte value.
bpc: bytes per channel (1 for 8-bit, 2 for 16-bit). 16-bit values are
normalized to 0-255 range by shifting right 8 bits."""
if bpc == 2:
if bpp >= 6: # 48bpp RGB or 64bpp ARGB
r = pixels[offset] + pixels[offset + 1] * 256
g = pixels[offset + 2] + pixels[offset + 3] * 256
b = pixels[offset + 4] + pixels[offset + 5] * 256
return (r + g + b) / 3.0 / 256.0
else: # 16bpp mono
return (pixels[offset] + pixels[offset + 1] * 256) / 256.0
if bpp >= 3:
return (pixels[offset] + pixels[offset + 1] + pixels[offset + 2]) / 3.0
else:
return float(pixels[offset])
def _subpixel_gradient(samples, best_idx, cx, cy, cos_a, sin_a):
"""Refine a gradient edge position to sub-pixel accuracy.
Given a ray's samples list [(dist, brightness), ...] and the index of the
steepest gradient, interpolate between the two neighbouring samples to find
the fractional distance where the brightness midpoint occurs.
"""
if best_idx <= 0 or best_idx >= len(samples) - 1:
return float(samples[best_idx][0])
d_before = samples[best_idx - 1][0]
b_before = samples[best_idx - 1][1]
d_after = samples[best_idx][0]
b_after = samples[best_idx][1]
denom = abs(b_before - b_after)
if denom > 0.1:
# Fraction of the step where brightness crosses the midpoint
mid = (b_before + b_after) * 0.5
frac = abs(b_before - mid) / denom
return d_before + frac * (d_after - d_before)
return float(d_after)
def analyze_donut_raw(raw, stride, bpp, width, height, peak_override=None, approx_center=None, bpc=1):
    """
    Analyze raw byte pixel data (from LockBits) for a donut pattern.
    Args:
        raw: .NET byte array containing pixel data
        stride: row stride in bytes (may differ from width*bpp due to padding)
        bpp: bytes per pixel (1 for mono, 3 for RGB, 4 for ARGB)
        width, height: image dimensions in pixels
        peak_override: pre-computed peak brightness (from CalculateHistogram)
        approx_center: (cx, cy) from previous tracking, skips coarse centroid scan
        bpc: bytes per channel (1 for 8-bit, 2 for 16-bit)
    Returns:
        Dict with inner/outer circle parameters, or None if no donut found.
    """
    # --- Step 1: Determine peak brightness and approximate center ---
    step = _config["CENTROID_DOWNSAMPLE"]
    brightness_thresh_pct = _config["BRIGHTNESS_THRESHOLD_PERCENT"]
    if peak_override is not None and approx_center is not None:
        # Fastest path: both peak and center provided (histogram + tracking).
        peak = peak_override
        ref = _state.get("ref_peak")
        if ref is not None:
            # Never let the working peak fall below half the reference peak —
            # guards against momentary dimming collapsing the edge threshold.
            peak = max(peak, ref * 0.5)
        threshold = peak * brightness_thresh_pct / 100.0
        approx_cx, approx_cy = approx_center
    elif approx_center is not None:
        # ROI path: center known from tracking, but scan pixels for accurate peak.
        approx_cx, approx_cy = approx_center
        peak = 0.0
        for y in range(0, height, step):
            row_off = y * stride
            for x in range(0, width, step):
                b = _get_brightness(raw, row_off + x * bpp, bpp, bpc)
                if b > peak:
                    peak = b
        if peak < 25.0:
            return None  # Frame too dark to contain a usable donut
        ref = _state.get("ref_peak")
        if ref is not None:
            peak = max(peak, ref * 0.5)
        threshold = peak * brightness_thresh_pct / 100.0
    else:
        # Standard path: scan every Nth pixel for peak brightness and
        # brightness-weighted centroid. Used for initial detection.
        sum_bx = 0.0
        sum_by = 0.0
        sum_b = 0.0
        peak = 0.0
        for y in range(0, height, step):
            row_off = y * stride
            for x in range(0, width, step):
                b = _get_brightness(raw, row_off + x * bpp, bpp, bpc)
                if b > peak:
                    peak = b
                if b > 15.0:  # Ignore pixels below noise floor
                    sum_bx += x * b
                    sum_by += y * b
                    sum_b += b
        if sum_b < 1.0 or peak < 25.0:
            return None  # No bright object found
        approx_cx = sum_bx / sum_b
        approx_cy = sum_by / sum_b
        threshold = peak * brightness_thresh_pct / 100.0
    # --- Step 2: Refine center using only bright pixels near the centroid ---
    search_r = int(min(width, height) * 0.3)
    x_min = max(0, int(approx_cx) - search_r)
    x_max = min(width, int(approx_cx) + search_r)
    y_min = max(0, int(approx_cy) - search_r)
    y_max = min(height, int(approx_cy) + search_r)
    step2 = max(1, step // 2)  # Finer sampling for refinement
    sum_bx2 = 0.0
    sum_by2 = 0.0
    sum_b2 = 0.0
    for y in range(y_min, y_max, step2):
        row_off = y * stride
        for x in range(x_min, x_max, step2):
            b = _get_brightness(raw, row_off + x * bpp, bpp, bpc)
            if b > threshold:
                sum_bx2 += x * b
                sum_by2 += y * b
                sum_b2 += b
    if sum_b2 > 0:
        approx_cx = sum_bx2 / sum_b2
        approx_cy = sum_by2 / sum_b2
    # --- Step 3: Cast rays outward from center to find inner/outer edges ---
    inner_points = []
    outer_points = []
    max_ray_len = int(min(width, height) * 0.45)
    grad_window = _config["GRAD_WINDOW"]
    num_rays = _config["NUM_RAYS"]
    ray_step = _config["RAY_STEP"]
    algorithm = _config["DETECTION_ALGORITHM"]
    for i in range(num_rays):
        angle = 2.0 * math.pi * i / num_rays
        cos_a = math.cos(angle)
        sin_a = math.sin(angle)
        # Collect all samples along the ray first
        all_samples = []  # (dist, brightness)
        for dist in range(3, max_ray_len, ray_step):
            px = int(approx_cx + cos_a * dist)
            py = int(approx_cy + sin_a * dist)
            if px < 0 or px >= width or py < 0 or py >= height:
                break
            b = _get_brightness(raw, py * stride + px * bpp, bpp, bpc)
            all_samples.append((dist, b))
        if len(all_samples) < grad_window * 3:
            continue  # Ray too short (hit image border) for gradient windows
        # --- Inner edge: steepest dark-to-bright gradient ---
        # Find the steepest rising gradient where the "after" side is bright.
        # This is robust to bloom flooding the central shadow with light.
        # Use adaptive window: scale down for small images to avoid spanning
        # the entire ring width (important for small donuts).
        eff_gw = grad_window
        if len(all_samples) < grad_window * 6:
            eff_gw = max(2, grad_window // 2)
        best_inner_grad = 0.0
        best_inner_idx = -1
        for j in range(eff_gw, len(all_samples) - eff_gw):
            avg_before = 0.0
            avg_after = 0.0
            for k in range(eff_gw):
                avg_before += all_samples[j - 1 - k][1]
                avg_after += all_samples[j + k][1]
            avg_before /= eff_gw
            avg_after /= eff_gw
            if avg_after > threshold and avg_after > 1.0:
                # Relative gradient: normalized by the brighter side.
                grad = (avg_after - avg_before) / avg_after
                if grad > best_inner_grad:
                    best_inner_grad = grad
                    best_inner_idx = j
        if best_inner_idx >= 0 and best_inner_grad > 0.3:
            # Sub-pixel interpolation: refine edge position between samples
            inner_dist = _subpixel_gradient(all_samples, best_inner_idx, approx_cx, approx_cy, cos_a, sin_a)
            ipx = approx_cx + cos_a * inner_dist
            ipy = approx_cy + sin_a * inner_dist
            inner_points.append((ipx, ipy))
            # Build ray_samples from inner edge onward for outer detection
            ray_samples = all_samples[best_inner_idx:]
            found_inner = True
        else:
            ray_samples = []
            found_inner = False
        # --- Outer edge detection (algorithm-dependent) ---
        if algorithm == "threshold_crossing":
            # Simple: last point where brightness drops below threshold.
            # Fast and reliable for high-contrast donuts with clean edges.
            # Sub-pixel: interpolate the exact crossing distance between the
            # last above-threshold and first below-threshold samples.
            if found_inner and len(ray_samples) > 5:
                last_crossing_idx = -1
                was_above = False
                for j in range(len(ray_samples)):
                    if ray_samples[j][1] > threshold:
                        was_above = True
                    elif was_above:
                        # Falling crossing: remember the LAST one on the ray.
                        last_crossing_idx = j
                        was_above = False
                if last_crossing_idx >= 0:
                    # Interpolate between the above-threshold and below-threshold samples
                    b_below = ray_samples[last_crossing_idx][1]
                    d_below = ray_samples[last_crossing_idx][0]
                    if last_crossing_idx > 0:
                        b_above = ray_samples[last_crossing_idx - 1][1]
                        d_above = ray_samples[last_crossing_idx - 1][0]
                        denom = b_above - b_below
                        if denom > 0.1:
                            frac = (b_above - threshold) / denom
                            cross_dist = d_above + frac * (d_below - d_above)
                        else:
                            cross_dist = float(d_below)
                    else:
                        cross_dist = float(d_below)
                    opx = approx_cx + cos_a * cross_dist
                    opy = approx_cy + sin_a * cross_dist
                    outer_points.append((opx, opy))
        elif algorithm == "steepest_gradient":
            # Steepest relative gradient drop, preferring the FIRST strong
            # edge over later diffraction-ring gradients. Once a gradient
            # exceeds 0.5 we accept it immediately; otherwise we keep the
            # best one found across the whole ray (min 0.3).
            if found_inner and len(ray_samples) > 4:
                o_gw = max(2, min(eff_gw, len(ray_samples) // 4))
                best_grad = 0.0
                best_idx = -1
                for j in range(o_gw, len(ray_samples) - o_gw):
                    avg_before = 0.0
                    avg_after = 0.0
                    for k in range(o_gw):
                        avg_before += ray_samples[j - 1 - k][1]
                        avg_after += ray_samples[j + k][1]
                    avg_before /= o_gw
                    avg_after /= o_gw
                    if avg_before > 1.0:
                        grad = (avg_before - avg_after) / avg_before
                    else:
                        grad = 0.0
                    if grad > best_grad:
                        best_grad = grad
                        best_idx = j
                        if grad > 0.5:
                            break  # strong enough — accept first major drop
                if best_idx >= 0 and best_grad > 0.3:
                    outer_dist = _subpixel_gradient(ray_samples, best_idx, approx_cx, approx_cy, cos_a, sin_a)
                    opx = approx_cx + cos_a * outer_dist
                    opy = approx_cy + sin_a * outer_dist
                    outer_points.append((opx, opy))
        else:  # "edge_to_dark" (default)
            # Steepest gradient that transitions to darkness (below threshold),
            # preferring the FIRST strong edge over later diffraction-ring
            # gradients. Once a gradient exceeds 0.5 we accept it immediately.
            if found_inner and len(ray_samples) > 4:
                o_gw = max(2, min(eff_gw, len(ray_samples) // 4))
                best_grad = 0.0
                best_idx = -1
                for j in range(o_gw, len(ray_samples) - o_gw):
                    avg_before = 0.0
                    avg_after = 0.0
                    for k in range(o_gw):
                        avg_before += ray_samples[j - 1 - k][1]
                        avg_after += ray_samples[j + k][1]
                    avg_before /= o_gw
                    avg_after /= o_gw
                    if avg_before > 1.0:
                        grad = (avg_before - avg_after) / avg_before
                    else:
                        grad = 0.0
                    # Unlike steepest_gradient, also require the "after" side
                    # to be genuinely dark (below threshold).
                    if grad > best_grad and avg_after < threshold:
                        best_grad = grad
                        best_idx = j
                        if grad > 0.5:
                            break  # strong enough — accept first major drop
                if best_idx >= 0 and best_grad > 0.3:
                    outer_dist = _subpixel_gradient(ray_samples, best_idx, approx_cx, approx_cy, cos_a, sin_a)
                    opx = approx_cx + cos_a * outer_dist
                    opy = approx_cy + sin_a * outer_dist
                    outer_points.append((opx, opy))
    # Need enough edge points from enough rays for a reliable circle fit
    n_inner = len(inner_points)
    n_outer = len(outer_points)
    if n_inner < 12 or n_outer < 12:
        return None
    # --- Step 4: Fit circles to the inner and outer edge point sets ---
    inner_fit = fit_circle(inner_points)
    outer_fit = fit_circle(outer_points)
    if inner_fit is None or outer_fit is None:
        return None
    # Refine by removing outliers (e.g., rays that hit diffraction rings)
    inner_fit = reject_outliers_and_refit(inner_points, inner_fit[0], inner_fit[1], inner_fit[2])
    outer_fit = reject_outliers_and_refit(outer_points, outer_fit[0], outer_fit[1], outer_fit[2])
    icx, icy, ir = inner_fit
    ocx, ocy, ore = outer_fit
    # --- Sanity checks ---
    min_donut_r = _config["MIN_DONUT_RADIUS"]
    if ir >= ore:
        return None  # Inner can't be larger than outer
    if ir < min_donut_r * 0.3:
        return None  # Inner circle too small to be real
    center_dist = math.sqrt((icx - ocx) ** 2 + (icy - ocy) ** 2)
    if center_dist > ore * 0.5:
        return None  # Centers too far apart — probably a bad detection
    # Compute outer edge point spread (diagnostic: how scattered are the points)
    outer_dists = []
    for px, py in outer_points:
        d = math.sqrt((px - ocx) ** 2 + (py - ocy) ** 2)
        outer_dists.append(d)
    outer_dists.sort()
    outer_spread = outer_dists[-1] - outer_dists[0] if outer_dists else 0
    return {
        "inner_cx": icx, "inner_cy": icy, "inner_r": ir,
        "outer_cx": ocx, "outer_cy": ocy, "outer_r": ore,
        "peak": peak,
        "threshold": threshold,
        "n_inner": n_inner, "n_outer": n_outer,
        "outer_spread": outer_spread,
        "approx_cx": approx_cx, "approx_cy": approx_cy,
    }
def analyze_donut_floats(pixels, width, height):
    """
    Locate a defocused-star "donut" in a float image and fit circles to
    its inner and outer edges.

    Same algorithm as analyze_donut_raw but for a flat float array
    (one float per pixel). Used by test_synthetic() which generates
    float data. Not used in live mode (live uses raw bytes from LockBits).

    Args:
        pixels: flat, row-major sequence of per-pixel brightness floats
            (index = y * width + x); length must be width * height.
        width: image width in pixels.
        height: image height in pixels.

    Returns:
        dict with keys "inner_cx", "inner_cy", "inner_r", "outer_cx",
        "outer_cy", "outer_r", "peak" on success, or None when no
        plausible donut could be detected.
    """
    step = _config["CENTROID_DOWNSAMPLE"]
    brightness_thresh_pct = _config["BRIGHTNESS_THRESHOLD_PERCENT"]
    num_rays = _config["NUM_RAYS"]
    ray_step = _config["RAY_STEP"]
    min_donut_r = _config["MIN_DONUT_RADIUS"]
    # --- Step 1: Find peak brightness and brightness-weighted centroid ---
    # Downsampled scan of the whole frame; pixels at or below 15.0 are
    # excluded from the centroid so background level doesn't drag it around.
    sum_bx = 0.0
    sum_by = 0.0
    sum_b = 0.0
    peak = 0.0
    for y in range(0, height, step):
        row_off = y * width
        for x in range(0, width, step):
            b = pixels[row_off + x]
            if b > peak:
                peak = b
            if b > 15.0:
                sum_bx += x * b
                sum_by += y * b
                sum_b += b
    # Essentially dark frame — nothing bright enough to analyze.
    if sum_b < 1.0 or peak < 25.0:
        return None
    approx_cx = sum_bx / sum_b
    approx_cy = sum_by / sum_b
    # Adaptive edge threshold, expressed as a percentage of peak brightness.
    threshold = peak * brightness_thresh_pct / 100.0
    # --- Step 2: Refine center using bright pixels near the centroid ---
    # Finer-stepped second pass, restricted to a window around the rough
    # centroid and counting only pixels above the adaptive threshold.
    search_r = int(min(width, height) * 0.3)
    x_min = max(0, int(approx_cx) - search_r)
    x_max = min(width, int(approx_cx) + search_r)
    y_min = max(0, int(approx_cy) - search_r)
    y_max = min(height, int(approx_cy) + search_r)
    sum_bx2 = 0.0
    sum_by2 = 0.0
    sum_b2 = 0.0
    step2 = max(1, step // 2)
    for y in range(y_min, y_max, step2):
        row_off = y * width
        for x in range(x_min, x_max, step2):
            b = pixels[row_off + x]
            if b > threshold:
                sum_bx2 += x * b
                sum_by2 += y * b
                sum_b2 += b
    # Keep the Step-1 centroid if no pixel cleared the threshold.
    if sum_b2 > 0:
        approx_cx = sum_bx2 / sum_b2
        approx_cy = sum_by2 / sum_b2
    # --- Step 3: Cast rays outward from center to find inner/outer edges ---
    inner_points = []
    outer_points = []
    max_ray_len = int(min(width, height) * 0.45)
    grad_window = 5  # samples averaged on each side of a candidate edge
    for i in range(num_rays):
        angle = 2.0 * math.pi * i / num_rays
        cos_a = math.cos(angle)
        sin_a = math.sin(angle)
        # Collect all samples along the ray first
        all_samples = []  # (dist, brightness)
        for dist in range(3, max_ray_len, ray_step):
            px = int(approx_cx + cos_a * dist)
            py = int(approx_cy + sin_a * dist)
            if px < 0 or px >= width or py < 0 or py >= height:
                break
            b = pixels[py * width + px]
            all_samples.append((dist, b))
        if len(all_samples) < grad_window * 3:
            continue  # ray left the frame too soon to judge edges
        # --- Inner edge: steepest dark-to-bright gradient ---
        # Adaptive window for small donuts
        eff_gw = grad_window
        if len(all_samples) < grad_window * 6:
            eff_gw = max(2, grad_window // 2)
        best_inner_grad = 0.0
        best_inner_idx = -1
        for j in range(eff_gw, len(all_samples) - eff_gw):
            avg_before = 0.0
            avg_after = 0.0
            for k in range(eff_gw):
                avg_before += all_samples[j - 1 - k][1]
                avg_after += all_samples[j + k][1]
            avg_before /= eff_gw
            avg_after /= eff_gw
            # Only count rises that end above the brightness threshold;
            # gradient is relative, normalized by the brighter (after) side.
            if avg_after > threshold and avg_after > 1.0:
                grad = (avg_after - avg_before) / avg_after
                if grad > best_inner_grad:
                    best_inner_grad = grad
                    best_inner_idx = j
        # Accept the inner edge only if the rise is steep enough (> 0.3).
        if best_inner_idx >= 0 and best_inner_grad > 0.3:
            inner_dist = _subpixel_gradient(all_samples, best_inner_idx, approx_cx, approx_cy, cos_a, sin_a)
            ipx = approx_cx + cos_a * inner_dist
            ipy = approx_cy + sin_a * inner_dist
            inner_points.append((ipx, ipy))
            # Outer-edge search continues outward from the inner edge.
            ray_samples = all_samples[best_inner_idx:]
            found_inner = True
        else:
            ray_samples = []
            found_inner = False
        # Find outer edge as point of steepest relative brightness decline
        # Scale window to ray_samples length so search starts near inner edge
        if found_inner and len(ray_samples) > 4:
            o_gw = max(2, min(eff_gw, len(ray_samples) // 4))
            best_grad = 0.0
            best_idx = -1
            for j in range(o_gw, len(ray_samples) - o_gw):
                avg_before = 0.0
                avg_after = 0.0
                for k in range(o_gw):
                    avg_before += ray_samples[j - 1 - k][1]
                    avg_after += ray_samples[j + k][1]
                avg_before /= o_gw
                avg_after /= o_gw
                # Falling gradient, normalized by the brighter (before) side.
                if avg_before > 1.0:
                    grad = (avg_before - avg_after) / avg_before
                else:
                    grad = 0.0
                if grad > best_grad:
                    best_grad = grad
                    best_idx = j
                if grad > 0.5:
                    break # strong enough — accept first major drop
            if best_idx >= 0 and best_grad > 0.3:
                outer_dist = _subpixel_gradient(ray_samples, best_idx, approx_cx, approx_cy, cos_a, sin_a)
                opx = approx_cx + cos_a * outer_dist
                opy = approx_cy + sin_a * outer_dist
                outer_points.append((opx, opy))
    # Need enough edge points from enough rays for a reliable circle fit.
    if len(inner_points) < 12 or len(outer_points) < 12:
        return None
    # --- Step 4: Fit circles to edge points ---
    inner_fit = fit_circle(inner_points)
    outer_fit = fit_circle(outer_points)
    if inner_fit is None or outer_fit is None:
        return None
    # Refine by removing outliers (e.g., rays that hit diffraction rings).
    inner_fit = reject_outliers_and_refit(inner_points, inner_fit[0], inner_fit[1], inner_fit[2])
    outer_fit = reject_outliers_and_refit(outer_points, outer_fit[0], outer_fit[1], outer_fit[2])
    icx, icy, ir = inner_fit
    ocx, ocy, ore = outer_fit
    # --- Sanity checks on the fitted circle pair ---
    if ir >= ore:
        return None  # inner circle can't be larger than the outer one
    if ir < min_donut_r * 0.3:
        return None  # inner circle too small to be real
    center_dist = math.sqrt((icx - ocx) ** 2 + (icy - ocy) ** 2)
    if center_dist > ore * 0.5:
        return None  # centers too far apart — probably a bad detection
    return {
        "inner_cx": icx, "inner_cy": icy, "inner_r": ir,
        "outer_cx": ocx, "outer_cy": ocy, "outer_r": ore,
        "peak": peak,
    }
# =============================================================================
# SMOOTHING
# =============================================================================
def smooth_val(old, new, factor):
    """Blend a new sample into a running value via exponential smoothing.

    Computes ``old * factor + new * (1 - factor)``. A ``None`` previous
    value means "no history yet", so the new sample is adopted as-is.
    """
    if old is None:
        return new
    kept = old * factor
    added = new * (1.0 - factor)
    return kept + added
def _validate_detection(result):
    """Return True when a fresh detection plausibly matches the smoothed state.

    Two gates are checked against the previously smoothed outer circle:
    the relative radius change and the center displacement, each bounded
    by a configured percentage. With no prior state, everything passes.
    """
    prev_r = _state["outer_r"]
    if prev_r is None:
        # No history yet — nothing to be inconsistent with.
        return True
    # Gate 1: outer radius must not change by more than the allowed percent.
    pct_change = abs(result["outer_r"] - prev_r) / prev_r * 100.0
    if pct_change > _config["MAX_RADIUS_CHANGE_PCT"]:
        return False
    # Gate 2: center must not jump farther than a fraction of the old radius.
    dx = result["outer_cx"] - _state["outer_cx"]
    dy = result["outer_cy"] - _state["outer_cy"]
    jump = math.sqrt(dx * dx + dy * dy)
    if jump > prev_r * _config["MAX_CENTER_JUMP_PCT"] / 100.0:
        return False
    return True
def update_state_with_result(result):
"""Validate and apply smoothing to new analysis results."""
is_debug = _state.get("debug_log", False)
if not _validate_detection(result):
_state["reject_count"] = _state.get("reject_count", 0) + 1
if is_debug:
print("[DBG] REJECTED frame %d (count=%d) raw_or=%.1f spread=%.1f raw_ocx=%.1f raw_ocy=%.1f | smooth_or=%.1f smooth_ocx=%.1f" % (
_state["frame_count"], _state["reject_count"],
result["outer_r"], result.get("outer_spread", 0),
result["outer_cx"], result["outer_cy"],
_state["outer_r"] or 0, _state["outer_cx"] or 0))
if _state["reject_count"] >= _config["STALE_FRAME_LIMIT"]: