Skip to content

Commit c7e4ff5

Browse files
committed
fix: Adapt to CrawlerMonitor constructor change
The upstream commit 1630fbd (2025-03-12) introduced a new TerminalUI and modified the CrawlerMonitor. This change updates the codebase to align with the new CrawlerMonitor constructor interface, which no longer accepts the `max_visible_rows` and `display_mode` parameters.
1 parent 897e017 commit c7e4ff5

File tree

7 files changed

+18
-71
lines changed

7 files changed

+18
-71
lines changed

deploy/docker/c4ai-code-context.md

+4-12
Original file line numberDiff line numberDiff line change
@@ -8898,9 +8898,7 @@ async def memory_adaptive(urls, browser_config, run_config):
88988898
dispatcher = MemoryAdaptiveDispatcher(
88998899
memory_threshold_percent=70.0,
89008900
max_session_permit=10,
8901-
monitor=CrawlerMonitor(
8902-
max_visible_rows=15, display_mode=DisplayMode.DETAILED
8903-
),
8901+
monitor=CrawlerMonitor(),
89048902
)
89058903
results = await crawler.arun_many(
89068904
urls, config=run_config, dispatcher=dispatcher
@@ -8919,9 +8917,7 @@ async def memory_adaptive_with_rate_limit(urls, browser_config, run_config):
89198917
rate_limiter=RateLimiter(
89208918
base_delay=(1.0, 2.0), max_delay=30.0, max_retries=2
89218919
),
8922-
monitor=CrawlerMonitor(
8923-
max_visible_rows=15, display_mode=DisplayMode.DETAILED
8924-
),
8920+
monitor=CrawlerMonitor(),
89258921
)
89268922
results = await crawler.arun_many(
89278923
urls, config=run_config, dispatcher=dispatcher
@@ -8936,9 +8932,7 @@ async def semaphore(urls, browser_config, run_config):
89368932
async with AsyncWebCrawler(config=browser_config) as crawler:
89378933
dispatcher = SemaphoreDispatcher(
89388934
semaphore_count=5,
8939-
monitor=CrawlerMonitor(
8940-
max_visible_rows=15, display_mode=DisplayMode.DETAILED
8941-
),
8935+
monitor=CrawlerMonitor(),
89428936
)
89438937
results = await crawler.arun_many(
89448938
urls, config=run_config, dispatcher=dispatcher
@@ -8956,9 +8950,7 @@ async def semaphore_with_rate_limit(urls, browser_config, run_config):
89568950
rate_limiter=RateLimiter(
89578951
base_delay=(1.0, 2.0), max_delay=30.0, max_retries=2
89588952
),
8959-
monitor=CrawlerMonitor(
8960-
max_visible_rows=15, display_mode=DisplayMode.DETAILED
8961-
),
8953+
monitor=CrawlerMonitor(),
89628954
)
89638955
results = await crawler.arun_many(
89648956
urls, config=run_config, dispatcher=dispatcher

deploy/docker/c4ai-doc-context.md

+4-19
Original file line numberDiff line numberDiff line change
@@ -6653,13 +6653,7 @@ The CrawlerMonitor provides real-time visibility into crawling operations:
66536653

66546654
```python
66556655
from crawl4ai import CrawlerMonitor, DisplayMode
6656-
monitor = CrawlerMonitor(
6657-
# Maximum rows in live display
6658-
max_visible_rows=15,
6659-
6660-
# DETAILED or AGGREGATED view
6661-
display_mode=DisplayMode.DETAILED
6662-
)
6656+
monitor = CrawlerMonitor()
66636657
```
66646658

66656659
**Display Modes**:
@@ -6687,10 +6681,7 @@ dispatcher = MemoryAdaptiveDispatcher(
66876681
max_delay=30.0,
66886682
max_retries=2
66896683
),
6690-
monitor=CrawlerMonitor( # Optional monitoring
6691-
max_visible_rows=15,
6692-
display_mode=DisplayMode.DETAILED
6693-
)
6684+
monitor=CrawlerMonitor() # Optional monitoring
66946685
)
66956686
```
66966687

@@ -6729,10 +6720,7 @@ dispatcher = SemaphoreDispatcher(
67296720
base_delay=(0.5, 1.0),
67306721
max_delay=10.0
67316722
),
6732-
monitor=CrawlerMonitor( # Optional monitoring
6733-
max_visible_rows=15,
6734-
display_mode=DisplayMode.DETAILED
6735-
)
6723+
monitor=CrawlerMonitor() # Optional monitoring
67366724
)
67376725
```
67386726

@@ -6848,10 +6836,7 @@ async def crawl_with_semaphore(urls):
68486836
base_delay=(0.5, 1.0),
68496837
max_delay=10.0
68506838
),
6851-
monitor=CrawlerMonitor(
6852-
max_visible_rows=15,
6853-
display_mode=DisplayMode.DETAILED
6854-
)
6839+
monitor=CrawlerMonitor()
68556840
)
68566841

68576842
async with AsyncWebCrawler(config=browser_config) as crawler:

docs/examples/dispatcher_example.py

+4-12
Original file line numberDiff line numberDiff line change
@@ -23,9 +23,7 @@ async def memory_adaptive(urls, browser_config, run_config):
2323
dispatcher = MemoryAdaptiveDispatcher(
2424
memory_threshold_percent=70.0,
2525
max_session_permit=10,
26-
monitor=CrawlerMonitor(
27-
max_visible_rows=15, display_mode=DisplayMode.DETAILED
28-
),
26+
monitor=CrawlerMonitor(),
2927
)
3028
results = await crawler.arun_many(
3129
urls, config=run_config, dispatcher=dispatcher
@@ -44,9 +42,7 @@ async def memory_adaptive_with_rate_limit(urls, browser_config, run_config):
4442
rate_limiter=RateLimiter(
4543
base_delay=(1.0, 2.0), max_delay=30.0, max_retries=2
4644
),
47-
monitor=CrawlerMonitor(
48-
max_visible_rows=15, display_mode=DisplayMode.DETAILED
49-
),
45+
monitor=CrawlerMonitor(),
5046
)
5147
results = await crawler.arun_many(
5248
urls, config=run_config, dispatcher=dispatcher
@@ -61,9 +57,7 @@ async def semaphore(urls, browser_config, run_config):
6157
async with AsyncWebCrawler(config=browser_config) as crawler:
6258
dispatcher = SemaphoreDispatcher(
6359
semaphore_count=5,
64-
monitor=CrawlerMonitor(
65-
max_visible_rows=15, display_mode=DisplayMode.DETAILED
66-
),
60+
monitor=CrawlerMonitor(),
6761
)
6862
results = await crawler.arun_many(
6963
urls, config=run_config, dispatcher=dispatcher
@@ -81,9 +75,7 @@ async def semaphore_with_rate_limit(urls, browser_config, run_config):
8175
rate_limiter=RateLimiter(
8276
base_delay=(1.0, 2.0), max_delay=30.0, max_retries=2
8377
),
84-
monitor=CrawlerMonitor(
85-
max_visible_rows=15, display_mode=DisplayMode.DETAILED
86-
),
78+
monitor=CrawlerMonitor(),
8779
)
8880
results = await crawler.arun_many(
8981
urls, config=run_config, dispatcher=dispatcher

docs/examples/proxy_rotation_demo.py

+1-4
Original file line numberDiff line numberDiff line change
@@ -103,10 +103,7 @@ async def demo_proxy_rotation_batch():
103103

104104
print("\n📈 Initializing crawler with proxy rotation...")
105105
async with AsyncWebCrawler(config=browser_config) as crawler:
106-
monitor = CrawlerMonitor(
107-
max_visible_rows=10,
108-
display_mode=DisplayMode.DETAILED
109-
)
106+
monitor = CrawlerMonitor()
110107

111108
dispatcher = MemoryAdaptiveDispatcher(
112109
memory_threshold_percent=80.0,

docs/md_v2/advanced/multi-url-crawling.md

+4-19
Original file line numberDiff line numberDiff line change
@@ -120,13 +120,7 @@ The CrawlerMonitor provides real-time visibility into crawling operations:
120120

121121
```python
122122
from crawl4ai import CrawlerMonitor, DisplayMode
123-
monitor = CrawlerMonitor(
124-
# Maximum rows in live display
125-
max_visible_rows=15,
126-
127-
# DETAILED or AGGREGATED view
128-
display_mode=DisplayMode.DETAILED
129-
)
123+
monitor = CrawlerMonitor()
130124
```
131125

132126
**Display Modes**:
@@ -154,10 +148,7 @@ dispatcher = MemoryAdaptiveDispatcher(
154148
max_delay=30.0,
155149
max_retries=2
156150
),
157-
monitor=CrawlerMonitor( # Optional monitoring
158-
max_visible_rows=15,
159-
display_mode=DisplayMode.DETAILED
160-
)
151+
monitor=CrawlerMonitor() # Optional monitoring
161152
)
162153
```
163154

@@ -196,10 +187,7 @@ dispatcher = SemaphoreDispatcher(
196187
base_delay=(0.5, 1.0),
197188
max_delay=10.0
198189
),
199-
monitor=CrawlerMonitor( # Optional monitoring
200-
max_visible_rows=15,
201-
display_mode=DisplayMode.DETAILED
202-
)
190+
monitor=CrawlerMonitor() # Optional monitoring
203191
)
204192
```
205193

@@ -315,10 +303,7 @@ async def crawl_with_semaphore(urls):
315303
base_delay=(0.5, 1.0),
316304
max_delay=10.0
317305
),
318-
monitor=CrawlerMonitor(
319-
max_visible_rows=15,
320-
display_mode=DisplayMode.DETAILED
321-
)
306+
monitor=CrawlerMonitor()
322307
)
323308

324309
async with AsyncWebCrawler(config=browser_config) as crawler:

tests/async/test_dispatchers.py

+1-3
Original file line numberDiff line numberDiff line change
@@ -153,9 +153,7 @@ async def test_rate_limit_backoff(self, browser_config, run_config):
153153

154154
async def test_monitor_integration(self, browser_config, run_config, test_urls):
155155
async with AsyncWebCrawler(config=browser_config) as crawler:
156-
monitor = CrawlerMonitor(
157-
max_visible_rows=5, display_mode=DisplayMode.DETAILED
158-
)
156+
monitor = CrawlerMonitor()
159157
dispatcher = MemoryAdaptiveDispatcher(max_session_permit=2, monitor=monitor)
160158
results = await crawler.arun_many(
161159
test_urls, config=run_config, dispatcher=dispatcher

tests/memory/test_dispatcher_stress.py

-2
Original file line numberDiff line numberDiff line change
@@ -285,8 +285,6 @@ async def run_memory_stress_test(
285285
# Create monitor with reference to test results
286286
monitor = StressTestMonitor(
287287
test_results=test_results,
288-
display_mode=DisplayMode.DETAILED,
289-
max_visible_rows=20,
290288
total_urls=url_count # Pass total URLs count
291289
)
292290

0 commit comments

Comments (0)