You: Hi, are you a chatbot for me?
Traceback (most recent call last):
File "C:\Users\Otp_Lab\Desktop\LXH2022\Fun\chat.py", line 77, in <module>
main()
File "C:\Users\Otp_Lab\Desktop\LXH2022\Fun\chat.py", line 48, in main
response = send_message(message_log)
File "C:\Users\Otp_Lab\Desktop\LXH2022\Fun\chat.py", line 10, in send_message
response = openai.ChatCompletion.create(
File "E:\Python\lib\site-packages\openai\api_resources\chat_completion.py", line 25, in create
return super().create(*args, **kwargs)
File "E:\Python\lib\site-packages\openai\api_resources\abstract\engine_api_resource.py", line 153, in create
response, _, api_key = requestor.request(
File "E:\Python\lib\site-packages\openai\api_requestor.py", line 226, in request
resp, got_stream = self._interpret_response(result, stream)
File "E:\Python\lib\site-packages\openai\api_requestor.py", line 619, in _interpret_response
self._interpret_response_line(
File "E:\Python\lib\site-packages\openai\api_requestor.py", line 679, in _interpret_response_line
raise self.handle_error_response(
openai.error.InvalidRequestError: This model's maximum context length is 4096 tokens. However, you requested 4135 tokens (39 in the messages, 4096 in the completion). Please reduce the length of the messages or completion.