-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathagentGPT.py
More file actions
152 lines (130 loc) · 5.96 KB
/
agentGPT.py
File metadata and controls
152 lines (130 loc) · 5.96 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
"""
agentGPT.py
Command-line chatbot interface using the OpenAI API and assistant capabilities.
Configuration is dynamically loaded from a JSON file with user and assistant properties.
Includes support for file uploads and persistent threaded interactions.
By Juan B. Gutiérrez, Professor of Mathematics
University of Texas at San Antonio.
License: Creative Commons Attribution-ShareAlike 4.0 International (CC BY-SA 4.0)
"""
import io
import json
import os
import sys
import time

import openai
# Force UTF-8 output so non-ASCII assistant replies print cleanly even on
# platforms whose default console encoding is not UTF-8 (notably Windows).
# reconfigure() (Python 3.7+) keeps the original stream object and its
# buffering; re-wrapping sys.stdout.buffer is only a fallback for streams
# that do not support reconfigure().
try:
    sys.stdout.reconfigure(encoding='utf-8')
    sys.stderr.reconfigure(encoding='utf-8')
except AttributeError:
    sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8')
    sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf-8')
class OpenAIChatbot:
    """Command-line chatbot backed by the OpenAI Assistants (beta) API.

    Configuration (user name, assistant name, instructions, model) is read
    from the ``CONFIG`` section of a JSON file. Each instance creates a
    fresh assistant (with the ``file_search`` tool enabled) and a fresh
    conversation thread that persists across turns of ``run_chat``.
    """

    def __init__(self, config_file="config.json"):
        """Load configuration, validate the API key, and create the
        assistant plus its conversation thread.

        Args:
            config_file: Path to a JSON file whose top-level ``CONFIG``
                object contains ``user``, ``name``, ``instructions`` and
                ``model`` keys.

        Raises:
            SystemExit: If the OPENAI_API_KEY environment variable is unset.
        """
        with open(config_file, 'r', encoding='utf-8') as file:
            raw_config = json.load(file)
        config = raw_config['CONFIG']  # Only the CONFIG section is used.

        self.user = config['user']
        self.name = config['name']

        # Prepend a custom interaction preamble to the instruction text.
        # BUG FIX: the original wrote "\\n" (a literal backslash-n), so the
        # prompt contained the two characters "\n" instead of blank lines.
        preamble = (
            f"Please address the user as Beloved {self.user}.\n\n "
            f"Introduce yourself as {self.name}, robot extraordinaire.\n\n "
        )
        self.instructions = preamble + config['instructions']

        # Model identifier passed straight to the assistant-creation call.
        self.model = config['model']

        # The API key must come from the environment; fail fast otherwise.
        openai.api_key = os.getenv("OPENAI_API_KEY")
        if not openai.api_key:
            print("API key is not set. Please set the OPENAI_API_KEY environment variable.")
            sys.exit(1)  # sys.exit instead of the bare builtin exit().

        self.client = openai.OpenAI()

        # file_search lets the assistant query documents uploaded later
        # through upload_file().
        self.assistant = self.client.beta.assistants.create(
            model=self.model,
            instructions=self.instructions,
            name=self.name,
            tools=[{"type": "file_search"}]
        )
        # One thread per instance keeps conversation context across turns.
        self.thread = self.client.beta.threads.create()

    def upload_file(self, file_path):
        """Upload a local file to the OpenAI API for assistant use.

        Args:
            file_path: Path to the file on disk.

        Returns:
            The OpenAI file ID on success, or ``None`` on failure.
        """
        try:
            with open(file_path, 'rb') as file_data:
                file_object = self.client.files.create(
                    file=file_data,
                    purpose='assistants'
                )
            print(f"File uploaded successfully: ID {file_object.id}")
            return file_object.id
        except Exception as e:
            print(f"Failed to upload file: {e}")
            return None

    def run_chat(self):
        """Run the interactive chat loop until the user types ``exit``.

        Input starting with ``file:`` uploads the named file and attaches
        it to the thread (instead of being sent as chat text); anything
        else is sent as a user message and the assistant's reply printed.
        """
        print("***************** N E W C H A T *****************")
        print(f"Assistant: {self.assistant.id}")
        print(f"Thread: {self.thread.id}")

        while True:
            print(">>>>>>>>>>>>>>>>>>>>>>>>>>")
            user_input = input(f"{self.user}: ")

            if user_input.lower() == 'exit':
                break

            # "file:<path>" → upload and attach instead of chatting.
            if user_input.startswith("file:"):
                file_path = user_input[5:].strip()
                file_id = self.upload_file(file_path)
                if not file_id:
                    # BUG FIX: previously a failed upload fell through and
                    # sent the literal "file:..." text as a chat message.
                    continue
                print(f"File ID {file_id} will be used in subsequent requests")
                try:
                    self.client.beta.threads.messages.create(
                        thread_id=self.thread.id,
                        role="user",
                        content="Query involving an uploaded file.",
                        attachments=[{"file_id": file_id,
                                      "tools": [{"type": "file_search"}]}]
                    )
                except Exception as e:
                    print(f"Failed to attach file: {e}")
                continue  # Prompt for next input after file handling.

            try:
                # Post the user's input, then start a run to process it.
                self.client.beta.threads.messages.create(
                    thread_id=self.thread.id,
                    role="user",
                    content=user_input,
                )
                my_run = self.client.beta.threads.runs.create(
                    thread_id=self.thread.id,
                    assistant_id=self.assistant.id
                )
            except Exception as e:
                print(f"Error: {e}")
                # BUG FIX: without this continue, my_run is undefined below
                # and the polling loop raises NameError.
                continue

            # Poll until the run leaves the queued/in-progress states.
            # BUG FIX: the original busy-waited with no delay, hammering
            # the API; sleep briefly between retrievals.
            while my_run.status in ["queued", "in_progress"]:
                time.sleep(0.5)
                my_run = self.client.beta.threads.runs.retrieve(
                    thread_id=self.thread.id,
                    run_id=my_run.id
                )
                print(".", end="", flush=True)  # Waiting indicator.

            if my_run.status == "completed":
                # messages.list returns newest first, so the first
                # assistant-role entry is the latest reply.
                all_messages = self.client.beta.threads.messages.list(
                    thread_id=self.thread.id
                )
                for message in all_messages.data:
                    if message.role == "assistant":
                        print("\n<<<<<<<<<<<<<<<<<<<<<<<<<<")
                        print("\n" + self.name + f": {message.content[0].text.value}")
                        break
            else:
                # Surface failed/cancelled/expired runs instead of
                # finishing silently as the original did.
                print(f"\nRun ended with status: {my_run.status}")
# Script entry point: build a chatbot from the default config file and
# hand control to its interactive loop.
if __name__ == "__main__":
    OpenAIChatbot().run_chat()