Description



Error message:

```
> Entering new LLMChain chain...
Prompt after formatting:
You are a chatbot having a conversation with a human.
Human:
我希望你作为```老师```,以```工作报告```为主题生成3个```测试```PPT标题,要求能吸引人的注意。
以下是返回的一些要求:
1.【The response should be a list of 3 items separated by "
" (例如: 香蕉
天气
说明)】
Chatbot:
127.0.0.1 - - [23/May/2024 19:34:11] "POST /generate_title HTTP/1.1" 500 -
Traceback (most recent call last):
File "D:\study_code\Auto-PPT-latest_branch\venv\Lib\site-packages\flask\app.py", line 2213, in call
return self.wsgi_app(environ, start_response)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\study_code\Auto-PPT-latest_branch\venv\Lib\site-packages\flask\app.py", line 2193, in wsgi_app
response = self.handle_exception(e)
^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\study_code\Auto-PPT-latest_branch\venv\Lib\site-packages\flask_cors\extension.py", line 176, in wrapped_function
return cors_after_request(app.make_response(f(*args, **kwargs)))
^^^^^^^^^^^^^^^^^^^^
File "D:\study_code\Auto-PPT-latest_branch\venv\Lib\site-packages\flask\app.py", line 2190, in wsgi_app
response = self.full_dispatch_request()
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\study_code\Auto-PPT-latest_branch\venv\Lib\site-packages\flask\app.py", line 1486, in full_dispatch_request
rv = self.handle_user_exception(e)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\study_code\Auto-PPT-latest_branch\venv\Lib\site-packages\flask_cors\extension.py", line 176, in wrapped_function
return cors_after_request(app.make_response(f(*args, **kwargs)))
^^^^^^^^^^^^^^^^^^^^
File "D:\study_code\Auto-PPT-latest_branch\venv\Lib\site-packages\flask\app.py", line 1484, in full_dispatch_request
rv = self.dispatch_request()
^^^^^^^^^^^^^^^^^^^^^^^
File "D:\study_code\Auto-PPT-latest_branch\venv\Lib\site-packages\flask\app.py", line 1469, in dispatch_request
return self.ensure_sync(self.view_functions[rule.endpoint])(**view_args)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\study_code\Auto-PPT-latest_branch\application.py", line 70, in stream1
return Response(gen_title_v2.predict_title_v2(form, role, title, topic_num),
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\study_code\Auto-PPT-latest_branch\generation\gen_ppt_outline.py", line 37, in predict_title_v2
return self.GptChain.predict(text)
^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\study_code\Auto-PPT-latest_branch\chain\gpt_memory.py", line 49, in predict
return self.llm_chain.predict(human_input=question)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\study_code\Auto-PPT-latest_branch\venv\Lib\site-packages\langchain\chains\llm.py", line 252, in predict
return self(kwargs, callbacks=callbacks)[self.output_key]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\study_code\Auto-PPT-latest_branch\venv\Lib\site-packages\langchain\chains\base.py", line 243, in call
raise e
File "D:\study_code\Auto-PPT-latest_branch\venv\Lib\site-packages\langchain\chains\base.py", line 237, in call
self._call(inputs, run_manager=run_manager)
File "D:\study_code\Auto-PPT-latest_branch\venv\Lib\site-packages\langchain\chains\llm.py", line 92, in _call
response = self.generate([inputs], run_manager=run_manager)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\study_code\Auto-PPT-latest_branch\venv\Lib\site-packages\langchain\chains\llm.py", line 102, in generate
return self.llm.generate_prompt(
^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\study_code\Auto-PPT-latest_branch\venv\Lib\site-packages\langchain\llms\base.py", line 186, in generate_prompt
return self.generate(prompt_strings, stop=stop, callbacks=callbacks, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\study_code\Auto-PPT-latest_branch\venv\Lib\site-packages\langchain\llms\base.py", line 279, in generate
output = self._generate_helper(
^^^^^^^^^^^^^^^^^^^^^^
File "D:\study_code\Auto-PPT-latest_branch\venv\Lib\site-packages\langchain\llms\base.py", line 223, in _generate_helper
raise e
File "D:\study_code\Auto-PPT-latest_branch\venv\Lib\site-packages\langchain\llms\base.py", line 210, in _generate_helper
self.generate(
File "D:\study_code\Auto-PPT-latest_branch\venv\Lib\site-packages\langchain\llms\openai.py", line 795, in generate
for stream_resp in completion_with_retry(self, messages=messages, **params):
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\study_code\Auto-PPT-latest_branch\venv\Lib\site-packages\langchain\llms\openai.py", line 90, in completion_with_retry
return _completion_with_retry(**kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\study_code\Auto-PPT-latest_branch\venv\Lib\site-packages\tenacity_init.py", line 289, in wrapped_f
return self(f, *args, **kw)
^^^^^^^^^^^^^^^^^^^^
File "D:\study_code\Auto-PPT-latest_branch\venv\Lib\site-packages\tenacity_init.py", line 379, in call
do = self.iter(retry_state=retry_state)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\study_code\Auto-PPT-latest_branch\venv\Lib\site-packages\tenacity_init.py", line 314, in iter
return fut.result()
^^^^^^^^^^^^
File "C:\Program Files\Python311\Lib\concurrent\futures_base.py", line 449, in result
return self.__get_result()
^^^^^^^^^^^^^^^^^^^
File "C:\Program Files\Python311\Lib\concurrent\futures_base.py", line 401, in __get_result
raise self.exception
File "D:\study_code\Auto-PPT-latest_branch\venv\Lib\site-packages\tenacity_init.py", line 382, in call
result = fn(args, kwargs)
^^^^^^^^^^^^^^^^^^^
File "D:\study_code\Auto-PPT-latest_branch\venv\Lib\site-packages\langchain\llms\openai.py", line 88, in _completion_with_retry
return llm.client.create(**kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\study_code\Auto-PPT-latest_branch\venv\Lib\site-packages\openai\api_resources\chat_completion.py", line 25, in create
return super().create(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\study_code\Auto-PPT-latest_branch\venv\Lib\site-packages\openai\api_resources\abstract\engine_api_resource.py", line 153, in create
response, _, api_key = requestor.request(
^^^^^^^^^^^^^^^^^^
File "D:\study_code\Auto-PPT-latest_branch\venv\Lib\site-packages\openai\api_requestor.py", line 298, in request
resp, got_stream = self._interpret_response(result, stream)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\study_code\Auto-PPT-latest_branch\venv\Lib\site-packages\openai\api_requestor.py", line 700, in _interpret_response
self._interpret_response_line(
File "D:\study_code\Auto-PPT-latest_branch\venv\Lib\site-packages\openai\api_requestor.py", line 763, in _interpret_response_line
raise self.handle_error_response(
openai.error.AuthenticationError: Incorrect API key provided: sk-GtWPG*********************nP98. You can find your API key at https://platform.openai.com/account/api-keys.
```
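
For reference, this authentication failure can usually be reproduced outside the app with a minimal standalone check. The sketch below is hypothetical and not code from Auto-PPT: it assumes the key is exported as the `OPENAI_API_KEY` environment variable and uses the pre-1.0 `openai` SDK that the traceback shows (`api_resources/` module paths).

```python
# Standalone check of the OpenAI key, independent of Auto-PPT.
# Assumes openai<1.0 and that the key is provided via OPENAI_API_KEY.
import os
import openai

openai.api_key = os.environ.get("OPENAI_API_KEY", "")

try:
    resp = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": "ping"}],
        max_tokens=5,
    )
    print("key accepted:", resp["choices"][0]["message"]["content"])
except openai.error.AuthenticationError as err:
    print("key rejected:", err)
```

If this snippet raises the same AuthenticationError, the key itself (or the endpoint it is being sent to) is the problem rather than the application code.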