
Commit 72b2ce9

ollama patch

1 parent f43ef90

File tree

1 file changed (+4, -2)

request_llms/bridge_ollama.py

Lines changed: 4 additions & 2 deletions
@@ -75,7 +75,7 @@ def predict_no_ui_long_connection(inputs, llm_kwargs, history=[], sys_prompt="",
             # make a POST request to the API endpoint, stream=False
             from .bridge_all import model_info
             endpoint = model_info[llm_kwargs['llm_model']]['endpoint']
-            response = requests.post(endpoint, headers=headers, proxies=proxies,
+            response = requests.post(endpoint, headers=headers, proxies=None,
                                      json=payload, stream=True, timeout=TIMEOUT_SECONDS); break
         except requests.exceptions.ReadTimeout as e:
             retry += 1
@@ -152,10 +152,12 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
     history.append(inputs); history.append("")

     retry = 0
+    if proxies is not None:
+        logger.error("Ollama不会使用代理服务器, 忽略了proxies的设置。")
     while True:
         try:
             # make a POST request to the API endpoint, stream=True
-            response = requests.post(endpoint, headers=headers, proxies=proxies,
+            response = requests.post(endpoint, headers=headers, proxies=None,
                                      json=payload, stream=True, timeout=TIMEOUT_SECONDS); break
         except:
             retry += 1

(The added log message translates to: "Ollama will not use a proxy server; the proxies setting is ignored.")
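
Context for the patch: Ollama usually serves on localhost, so routing its requests through a user-configured HTTP proxy tends to break the connection; the commit therefore forces proxies=None and logs an error when a proxy is configured. Below is a minimal, self-contained sketch of the patched request pattern. The endpoint path and default port come from Ollama's documentation, but the model name, MAX_RETRY, TIMEOUT_SECONDS, the example proxy dict, and the loguru logger are assumptions for illustration, not values taken from the repository.

import requests
from loguru import logger  # assumption: the project logs through loguru

TIMEOUT_SECONDS = 30        # assumed timeout for illustration
MAX_RETRY = 2               # assumed retry budget
endpoint = "http://localhost:11434/api/chat"   # Ollama's default local endpoint
headers = {"Content-Type": "application/json"}
payload = {"model": "llama3",                  # hypothetical model name
           "messages": [{"role": "user", "content": "hello"}],
           "stream": True}
proxies = {"http": "http://127.0.0.1:7890"}    # example user proxy config

# Ollama is contacted directly; any configured proxy is deliberately ignored.
if proxies is not None:
    logger.error("Ollama will not use a proxy server; the proxies setting is ignored.")

retry = 0
while True:
    try:
        # make a POST request to the API endpoint, stream=True, proxies forced to None
        response = requests.post(endpoint, headers=headers, proxies=None,
                                 json=payload, stream=True, timeout=TIMEOUT_SECONDS)
        break
    except requests.exceptions.ReadTimeout:
        retry += 1
        if retry > MAX_RETRY:
            raise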
