Skip to content

Commit 295c8c5

Browse files
author
r0BIT
committed
fix: B3 summary counting, computer SID caching, OpenGraph placement
B3 Summary Counting Logic (skip detection):
- Add TaskType.SKIPPED enum value for dual-homed hosts
- Add TaskRow.skipped() factory method
- Add SKIPPED row when dual-homed detection triggers in online.py
- Fix async_runner.py to check for SKIPPED rows instead of empty results
- Tests updated for new type and factory method

Computer SID Caching:
- Add persistent cache for computer name→SID resolution in sid_resolver.py
- Uses 'computers' category in SQLite cache
- Cache key format: name:{HOSTNAME}:{DOMAIN}
- Eliminates redundant LDAP queries for repeated scans

OpenGraph Message Placement:
- Move JSON auto-generation messages from pre-summary to BloodHound section
- Add Rich-formatted 'BloodHound OpenGraph Integration' section header
- Use Panel and Rule for visual consistency
1 parent 94b7a45 commit 295c8c5

File tree

6 files changed

+139
-24
lines changed

6 files changed

+139
-24
lines changed

taskhound/cli.py

Lines changed: 22 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -261,25 +261,19 @@ def main():
261261
laps_failures.append(laps_result)
262262

263263
# Exports
264-
# Auto-generate JSON if OpenGraph is enabled and no explicit JSON output was specified
264+
# Track if we need to auto-generate JSON for OpenGraph (defer messages until OpenGraph section)
265+
opengraph_json_path = None
266+
opengraph_json_overwrites = False
265267
if args.bh_opengraph and not args.json:
266268
# Create output directory if it doesn't exist
267269
import os
268270

269271
os.makedirs(args.bh_output, exist_ok=True)
270272

271-
# Generate JSON path
272-
json_path = f"{args.bh_output}/taskhound_data.json"
273-
274-
# Warn if file already exists (will be overwritten)
275-
if os.path.exists(json_path):
276-
warn(f"OpenGraph will overwrite existing file: {json_path}")
277-
278-
# Inform user about auto-generation and how to customize
279-
info(f"Auto-generating JSON for OpenGraph: {json_path}")
280-
info("To use a different path, specify --json <path>")
281-
282-
args.json = json_path
273+
# Generate JSON path (messages will be shown in OpenGraph section)
274+
opengraph_json_path = f"{args.bh_output}/taskhound_data.json"
275+
opengraph_json_overwrites = os.path.exists(opengraph_json_path)
276+
args.json = opengraph_json_path
283277

284278
if args.json:
285279
write_json(args.json, all_rows)
@@ -315,11 +309,24 @@ def main():
315309

316310
# BloodHound OpenGraph Integration
317311
if args.bh_opengraph:
312+
from rich.console import Console
313+
from rich.panel import Panel
314+
318315
from .config_model import BloodHoundConfig
319316

317+
console = Console()
320318
print()
321-
info("BloodHound OpenGraph Integration")
322-
print("-" * 50)
319+
console.print(Panel.fit(
320+
"[bold]BloodHound OpenGraph Integration[/bold]",
321+
border_style="blue",
322+
))
323+
324+
# Show JSON auto-generation messages (deferred from earlier)
325+
if opengraph_json_path:
326+
if opengraph_json_overwrites:
327+
warn(f"OpenGraph will overwrite existing file: {opengraph_json_path}")
328+
info(f"Auto-generating JSON for OpenGraph: {opengraph_json_path}")
329+
info("To use a different path, specify --json <path>")
323330

324331
# Create consolidated config from args
325332
bh_config = BloodHoundConfig.from_args_and_config(args)

taskhound/engine/async_runner.py

Lines changed: 20 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -194,14 +194,21 @@ def _process_single(
194194
for row in target_rows
195195
)
196196

197-
result.success = not has_failure
197+
# Check if target was skipped (dual-homed duplicate)
198+
# process_target adds TaskRow.skipped() rows for dual-homed hosts
199+
has_skipped = any(
200+
row.type == "SKIPPED"
201+
for row in target_rows
202+
)
203+
204+
result.success = not has_failure and not has_skipped
198205
result.lines = lines
199206
result.rows = target_rows
200207
result.laps_result = laps_result
201208

202-
# Detect skipped targets (dual-homed duplicates)
203-
# These return empty lines and no rows, but are not failures
204-
if not has_failure and not lines and not target_rows:
209+
# Mark as skipped only if explicitly flagged via SKIPPED row
210+
# (not just because results are empty after filtering)
211+
if has_skipped:
205212
result.skipped = True
206213

207214
if has_failure:
@@ -399,13 +406,19 @@ def _run_sequential(
399406
for row in target_rows
400407
)
401408

402-
result.success = not has_failure
409+
# Check if target was skipped (dual-homed duplicate)
410+
has_skipped = any(
411+
row.type == "SKIPPED"
412+
for row in target_rows
413+
)
414+
415+
result.success = not has_failure and not has_skipped
403416
result.lines = lines
404417
result.rows = target_rows
405418
result.laps_result = laps_result
406419

407-
# Detect skipped targets (dual-homed duplicates)
408-
if not has_failure and not lines and not target_rows:
420+
# Mark as skipped only if explicitly flagged via SKIPPED row
421+
if has_skipped:
409422
result.skipped = True
410423

411424
if has_failure:

taskhound/engine/online.py

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -295,6 +295,12 @@ def process_target(
295295
if not was_first:
296296
warn(f"{target}: Skipping - already processed as {previous_target} (dual-homed host: {server_fqdn})")
297297
status(f"[Collecting] {target} [SKIP] (duplicate of {previous_target})")
298+
# Add SKIPPED row so async_runner can detect this was a dual-homed skip
299+
all_rows.append(TaskRow.skipped(
300+
host=server_fqdn,
301+
reason=f"duplicate of {previous_target}",
302+
target_ip=target,
303+
))
298304
# Close SMB connection before returning
299305
if smb:
300306
with contextlib.suppress(Exception):

taskhound/models/task.py

Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,7 @@ class TaskType(str, Enum):
1515
PRIV = "PRIV"
1616
TASK = "TASK"
1717
FAILURE = "FAILURE"
18+
SKIPPED = "SKIPPED" # Dual-homed duplicate, not processed
1819

1920

2021
@dataclass
@@ -186,3 +187,29 @@ def failure(
186187
type=TaskType.FAILURE.value,
187188
reason=reason,
188189
)
190+
191+
@classmethod
192+
def skipped(
193+
cls,
194+
host: str,
195+
reason: str,
196+
target_ip: Optional[str] = None,
197+
) -> "TaskRow":
198+
"""
199+
Create a SKIPPED row for dual-homed hosts already processed.
200+
201+
Args:
202+
host: Hostname or IP of the skipped target
203+
reason: Skip reason message (e.g., "duplicate of 192.168.1.1")
204+
target_ip: Original target IP/hostname
205+
206+
Returns:
207+
TaskRow with type=SKIPPED
208+
"""
209+
return cls(
210+
host=host,
211+
path="",
212+
target_ip=target_ip,
213+
type=TaskType.SKIPPED.value,
214+
reason=reason,
215+
)

taskhound/utils/sid_resolver.py

Lines changed: 24 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -383,7 +383,7 @@ def resolve_name_to_sid_via_ldap(
383383
) -> Optional[str]:
384384
"""
385385
Resolve a computer name or username to its SID using LDAP.
386-
This is the reverse operation of resolve_sid_via_ldap.
386+
Results are cached persistently to avoid redundant LDAP queries.
387387
NOW SUPPORTS NTLM HASH AUTHENTICATION via Impacket LDAP!
388388
389389
Args:
@@ -399,6 +399,26 @@ def resolve_name_to_sid_via_ldap(
399399
Returns:
400400
SID string (e.g., "S-1-5-21-..."), None if resolution fails
401401
"""
402+
# Check cache first (before any processing)
403+
from ..utils.cache_manager import get_cache
404+
cache = get_cache()
405+
406+
if cache and is_computer:
407+
# Normalize for cache key: strip $ and domain suffix
408+
cache_name = name.upper()
409+
if cache_name.endswith("$"):
410+
cache_name = cache_name[:-1]
411+
if "." in cache_name:
412+
cache_name = cache_name.split(".")[0]
413+
cache_key = f"name:{cache_name}:{domain.upper()}"
414+
415+
cached_sid = cache.get("computers", cache_key)
416+
if cached_sid:
417+
debug(f"Cache hit for computer {name}: {cached_sid}")
418+
return cached_sid
419+
else:
420+
cache_key = None # Only cache computers for now
421+
402422
try:
403423
# Extract just the name part if it's in USER@DOMAIN format
404424
search_name = name
@@ -489,6 +509,9 @@ def resolve_name_to_sid_via_ldap(
489509
if sid_string:
490510
account_name = attributes.get("sAMAccountName") or attributes.get("cn") or name
491511
info(f"Resolved {account_name} to SID {sid_string} via LDAP")
512+
# Cache for future lookups (computers only)
513+
if cache and cache_key:
514+
cache.set("computers", cache_key, sid_string)
492515
return sid_string
493516
else:
494517
debug(f"Failed to convert binary SID to string for {name}")

tests/test_models_task.py

Lines changed: 40 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -24,9 +24,13 @@ def test_failure_value(self):
2424
"""FAILURE has correct value."""
2525
assert TaskType.FAILURE.value == "FAILURE"
2626

27+
def test_skipped_value(self):
28+
"""SKIPPED has correct value."""
29+
assert TaskType.SKIPPED.value == "SKIPPED"
30+
2731
def test_all_types_exist(self):
2832
"""All expected types exist."""
29-
expected = {"TIER0", "PRIV", "TASK", "FAILURE"}
33+
expected = {"TIER0", "PRIV", "TASK", "FAILURE", "SKIPPED"}
3034
actual = {t.name for t in TaskType}
3135
assert expected == actual
3236

@@ -146,3 +150,38 @@ def test_repr_contains_path(self):
146150
row = TaskRow(host="SERVER", path="\\Windows\\System32\\Tasks\\MyTask")
147151
repr_str = repr(row)
148152
assert "MyTask" in repr_str or "path" in repr_str
153+
154+
155+
class TestTaskRowFactoryMethods:
156+
"""Tests for TaskRow factory methods."""
157+
158+
def test_skipped_factory_basic(self):
159+
"""skipped() creates correct TaskRow."""
160+
row = TaskRow.skipped("SERVER", "Duplicate of OTHER")
161+
assert row.host == "SERVER"
162+
assert row.type == TaskType.SKIPPED
163+
assert row.reason == "Duplicate of OTHER"
164+
assert row.path == ""
165+
166+
def test_skipped_factory_with_target_ip(self):
167+
"""skipped() can include target_ip."""
168+
row = TaskRow.skipped("SERVER", "Dual-homed duplicate", target_ip="192.168.1.100")
169+
assert row.host == "SERVER"
170+
assert row.type == TaskType.SKIPPED
171+
assert row.target_ip == "192.168.1.100"
172+
assert row.reason == "Dual-homed duplicate"
173+
174+
def test_failure_factory_basic(self):
175+
"""failure() creates correct TaskRow."""
176+
row = TaskRow.failure("SERVER", "Connection refused")
177+
assert row.host == "SERVER"
178+
assert row.type == TaskType.FAILURE
179+
assert row.reason == "Connection refused"
180+
assert row.path == ""
181+
182+
def test_failure_factory_with_target_ip(self):
183+
"""failure() can include target_ip."""
184+
row = TaskRow.failure("SERVER", "Access denied", target_ip="10.0.0.1")
185+
assert row.host == "SERVER"
186+
assert row.type == TaskType.FAILURE
187+
assert row.target_ip == "10.0.0.1"

0 commit comments

Comments (0)