Commit 2f0ff7f

Merge pull request #142 from mindsdb/ml-104-improve-time-to-first-token-for-minds
2 parents b0251bd + c497a21

2 files changed (+35 -3)

examples/using_agents_with_retrieval.py

Lines changed: 2 additions & 3 deletions
@@ -5,16 +5,15 @@
 con = mindsdb_sdk.connect()
 
 open_ai_key = os.getenv('OPENAI_API_KEY')
-model_name = 'gpt-4'
+model_name = 'gpt-4o'
 
 # Now create an agent that will use the model we just created.
 agent = con.agents.create(name=f'mindsdb_retrieval_agent_{model_name}_{uuid4().hex}',
-                          model='gpt-4',
+                          model=model_name,
                           params={'return_context': True})
 
 agent.add_file('./data/tokaido-rulebook.pdf', 'rule book for the board game Tokaido')
 
-
 question = "what are the rules for the game takaido?"
 answer = agent.completion([{'question': question, 'answer': None}])
 print(answer.context)
Lines changed: 33 additions & 0 deletions
@@ -0,0 +1,33 @@
+import mindsdb_sdk
+from uuid import uuid4
+import os
+
+con = mindsdb_sdk.connect()
+
+open_ai_key = os.getenv('OPENAI_API_KEY')
+model_name = 'gpt-4o'
+
+# Now create an agent that will use the model we just created.
+agent = con.agents.create(name=f'mindsdb_retrieval_agent_{model_name}_{uuid4().hex}',
+                          model=model_name,
+                          params={'return_context': True})
+
+agent.add_file('./data/tokaido-rulebook.pdf', 'rule book for the board game Tokaido')
+
+question = "what are the rules for the game takaido?"
+
+# Stream the completion
+completion_stream = agent.completion_stream([{'question': question, 'answer': None}])
+
+# Process the streaming response
+full_response = ""
+for chunk in completion_stream:
+    print(chunk)  # Print the entire chunk for debugging
+    if isinstance(chunk, dict):
+        if 'output' in chunk:
+            full_response += chunk['output']
+    elif isinstance(chunk, str):
+        full_response += chunk
+
+print("\n\nFull response:")
+print(full_response)
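Note: the new example above buffers the streamed chunks into full_response and only prints the answer at the end. Since the aim of this PR is to improve time to first token, a caller can instead surface output as each chunk arrives. The following is a minimal sketch, not part of the commit; it reuses the same SDK calls shown above and assumes chunks arrive either as dicts with an 'output' key or as plain strings, as in the example.

import mindsdb_sdk
from uuid import uuid4

con = mindsdb_sdk.connect()

# Same agent setup as in the example above.
agent = con.agents.create(name=f'mindsdb_retrieval_agent_{uuid4().hex}',
                          model='gpt-4o',
                          params={'return_context': True})
agent.add_file('./data/tokaido-rulebook.pdf', 'rule book for the board game Tokaido')

question = "what are the rules for the game takaido?"

# Print each chunk as soon as it arrives instead of buffering the full answer.
# Assumes the chunk shapes shown in the example above (dict with 'output', or str).
for chunk in agent.completion_stream([{'question': question, 'answer': None}]):
    if isinstance(chunk, dict):
        print(chunk.get('output', ''), end='', flush=True)
    elif isinstance(chunk, str):
        print(chunk, end='', flush=True)
print()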
