Skip to content

Commit f106993

Browse files
ei-gradaider (openai/gpt-4.1)
and
aider (openai/gpt-4.1)
committed
fix: add --disable-playwright option to suppress Playwright prompts and usage
Co-authored-by: aider (openai/gpt-4.1) <[email protected]>
1 parent 270e842 commit f106993

File tree

3 files changed

+136
-4
lines changed

3 files changed

+136
-4
lines changed

aider/args.py

+6
Original file line numberDiff line numberDiff line change
@@ -670,6 +670,12 @@ def get_parser(default_config_files, git_root):
670670

671671
######
672672
group = parser.add_argument_group("Other settings")
673+
group.add_argument(
674+
"--disable-playwright",
675+
action="store_true",
676+
help="Never prompt for or attempt to install Playwright for web scraping (default: False).",
677+
default=False,
678+
)
673679
group.add_argument(
674680
"--file",
675681
action="append",

aider/commands.py

+10-4
Original file line numberDiff line numberDiff line change
@@ -220,12 +220,18 @@ def cmd_web(self, args, return_content=False):
220220

221221
self.io.tool_output(f"Scraping {url}...")
222222
if not self.scraper:
223-
res = install_playwright(self.io)
224-
if not res:
225-
self.io.tool_warning("Unable to initialize playwright.")
223+
disable_playwright = getattr(self.args, "disable_playwright", False)
224+
if disable_playwright:
225+
res = False
226+
else:
227+
res = install_playwright(self.io)
228+
if not res:
229+
self.io.tool_warning("Unable to initialize playwright.")
226230

227231
self.scraper = Scraper(
228-
print_error=self.io.tool_error, playwright_available=res, verify_ssl=self.verify_ssl
232+
print_error=self.io.tool_error,
233+
playwright_available=res,
234+
verify_ssl=self.verify_ssl,
229235
)
230236

231237
content = self.scraper.scrape(url) or ""
+120
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,120 @@
1+
import pytest
2+
from unittest.mock import MagicMock
3+
4+
from aider.scrape import install_playwright, Scraper
5+
6+
class DummyIO:
    """Minimal stand-in for aider's IO object used by the scraper tests.

    Records every message passed to it in ``outputs`` so tests can assert
    on what was emitted; ``confirmed`` controls the answer returned by
    ``confirm_ask``.
    """

    def __init__(self):
        # Chronological log of everything the code under test reported.
        self.outputs = []
        # Default answer for confirm_ask; tests flip this to simulate "yes".
        self.confirmed = False

    def tool_output(self, msg):
        """Record an informational message."""
        self.outputs.append(msg)

    def confirm_ask(self, msg, default="y"):
        """Record the prompt and answer with the preset ``confirmed`` flag."""
        self.outputs.append(f"confirm: {msg}")
        return self.confirmed

    def tool_error(self, msg):
        """Record an error message, tagged so tests can distinguish it."""
        self.outputs.append(f"error: {msg}")
20+
21+
22+
def test_scraper_disable_playwright_flag(monkeypatch):
    """When playwright is unavailable, Scraper must fall back to httpx.

    ``--disable-playwright`` ultimately just constructs the scraper with
    ``playwright_available=False``, so that is what we simulate here.
    """
    dummy_io = DummyIO()
    scraper = Scraper(print_error=dummy_io.tool_error, playwright_available=False)

    # Capture calls to the httpx path to prove it was taken.
    calls = []

    def fake_httpx(url):
        calls.append(url)
        return "plain text", "text/plain"

    scraper.scrape_with_httpx = fake_httpx

    content = scraper.scrape("http://example.com")

    assert content == "plain text"
    assert calls  # the httpx fallback was actually invoked
35+
36+
def test_scraper_enable_playwright(monkeypatch):
    """When playwright is available, Scraper must use the playwright path."""
    dummy_io = DummyIO()
    scraper = Scraper(print_error=dummy_io.tool_error, playwright_available=True)

    # Capture calls to the playwright path to prove it was taken.
    calls = []

    def fake_playwright(url):
        calls.append(url)
        return "<html>hi</html>", "text/html"

    scraper.scrape_with_playwright = fake_playwright

    content = scraper.scrape("http://example.com")

    # Scraper may convert the HTML to text/markdown, so accept either form.
    assert content.startswith("hi") or "<html>" in content
    assert calls  # the playwright path was actually invoked
49+
50+
def test_commands_web_disable_playwright(monkeypatch):
    """
    Test that Commands.cmd_web does not emit a misleading warning when --disable-playwright is set.
    """
    # Imported here (not at module top) so collection of the other tests does
    # not require the heavier aider.commands import chain.
    from aider.commands import Commands

    # Dummy IO to capture outputs and warnings
    class DummyIO:
        def __init__(self):
            self.outputs = []
            self.warnings = []
            self.errors = []

        def tool_output(self, msg, *a, **k):
            self.outputs.append(msg)

        def tool_warning(self, msg, *a, **k):
            self.warnings.append(msg)

        def tool_error(self, msg, *a, **k):
            self.errors.append(msg)

        def read_text(self, filename, silent=False):
            return ""

        def confirm_ask(self, *a, **k):
            return True

        def print(self, *a, **k):
            pass

    # Dummy coder to satisfy Commands
    class DummyCoder:
        def __init__(self):
            self.cur_messages = []
            # Ad-hoc model object carrying only the attributes cmd_web touches.
            self.main_model = type("M", (), {"edit_format": "code", "name": "dummy", "info": {}})

        def get_rel_fname(self, fname):
            return fname

        def get_inchat_relative_files(self):
            return []

        def abs_root_path(self, fname):
            return fname

        def get_all_abs_files(self):
            return []

        def get_announcements(self):
            return []

        def format_chat_chunks(self):
            return type("Chunks", (), {"repo": [], "readonly_files": [], "chat_files": []})()

        def event(self, *a, **k):
            pass

    # Patch install_playwright to always return False (simulate not available)
    # NOTE(review): if aider.commands does `from aider.scrape import
    # install_playwright`, patching the aider.scrape attribute will not affect
    # the name already bound in aider.commands. Harmless here because
    # disable_playwright=True means cmd_web never calls it — but confirm the
    # patch target if this test is extended to the enabled path.
    monkeypatch.setattr("aider.scrape.install_playwright", lambda io: False)

    # Patch Scraper to always use scrape_with_httpx and never warn
    class DummyScraper:
        def __init__(self, **kwargs):
            self.called = False

        def scrape(self, url):
            self.called = True
            return "dummy content"

    monkeypatch.setattr("aider.commands.Scraper", DummyScraper)

    io = DummyIO()
    coder = DummyCoder()
    # Simulate the parsed CLI namespace with only the flag under test.
    args = type("Args", (), {"disable_playwright": True})()
    commands = Commands(io, coder, args=args)

    commands.cmd_web("http://example.com")
    # Should not emit a warning about playwright
    assert not io.warnings
    # Should not contain message "For the best web scraping, install Playwright:"
    assert all("install Playwright:" not in msg for msg in io.outputs)
    # Should output scraping and added to chat
    assert any("Scraping" in msg for msg in io.outputs)
    assert any("added to chat" in msg for msg in io.outputs)

0 commit comments

Comments
 (0)