Aider version: 0.60.1
Python version: 3.11.9
Platform: Windows-10-10.0.19045-SP0
Python implementation: CPython
Virtual environment: No
OS: Windows 10 (64bit)
Git version: git version 2.45.2.windows.1
An uncaught exception occurred:
Traceback (most recent call last):
  File "openai.py", line 854, in completion
    raise e
  File "openai.py", line 749, in completion
    return self.streaming(
           ^^^^^^^^^^^^^^^
  File "openai.py", line 979, in streaming
    headers, response = self.make_sync_openai_chat_completion_request(
                        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "openai.py", line 649, in make_sync_openai_chat_completion_request
    raise e
  File "openai.py", line 631, in make_sync_openai_chat_completion_request
    raw_response = openai_client.chat.completions.with_raw_response.create(
                   ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "_legacy_response.py", line 356, in wrapped
    return cast(LegacyAPIResponse[R], func(*args, **kwargs))
                                      ^^^^^^^^^^^^^^^^^^^^^
  File "_utils.py", line 274, in wrapper
    return func(*args, **kwargs)
           ^^^^^^^^^^^^^^^^^^^^^
  File "completions.py", line 815, in create
    return self._post(
           ^^^^^^^^^^^
  File "_base_client.py", line 1277, in post
    return cast(ResponseT, self.request(cast_to, opts, stream=stream, stream_cls=stream_cls))
                           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "_base_client.py", line 954, in request
    return self._request(
           ^^^^^^^^^^^^^^
  File "_base_client.py", line 1058, in _request
    raise self._make_status_error_from_response(err.response) from None
openai.APIStatusError: Error code: 413 - {'error': {'code': 'tokens_limit_reached', 'message': 'Request body too large for gpt-4o model. Max size: 8000 tokens.', 'details': None}}
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
  File "main.py", line 1587, in completion
    raise e
  File "main.py", line 1560, in completion
    response = openai_chat_completions.completion(
               ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "openai.py", line 864, in completion
    raise OpenAIError(
litellm.llms.OpenAI.openai.OpenAIError: Error code: 413 - {'error': {'code': 'tokens_limit_reached', 'message': 'Request body too large for gpt-4o model. Max size: 8000 tokens.', 'details': None}}
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
  File "base_coder.py", line 1129, in send_message
    yield from self.send(messages, functions=self.functions)
  File "base_coder.py", line 1414, in send
    hash_object, completion = send_completion(
                              ^^^^^^^^^^^^^^^^
  File "sendchat.py", line 85, in send_completion
    res = litellm.completion(**kwargs)
          ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "utils.py", line 1013, in wrapper
    raise e
  File "utils.py", line 903, in wrapper
    result = original_function(*args, **kwargs)
             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "main.py", line 2999, in completion
    raise exception_type(
          ^^^^^^^^^^^^^^^
  File "exception_mapping_utils.py", line 2116, in exception_type
    raise e
  File "exception_mapping_utils.py", line 404, in exception_type
    raise APIError(
litellm.exceptions.APIError: litellm.APIError: APIError: GithubException - Error code: 413 - {'error': {'code': 'tokens_limit_reached', 'message': 'Request body too large for gpt-4o model. Max size: 8000 tokens.', 'details': None}}
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
  File "<frozen runpy>", line 198, in _run_module_as_main
  File "<frozen runpy>", line 88, in _run_code
  File "__main__.py", line 7, in <module>
    sys.exit(main())
             ^^^^^^
  File "main.py", line 776, in main
    coder.run()
  File "base_coder.py", line 730, in run
    self.run_one(user_message, preproc)
  File "base_coder.py", line 773, in run_one
    list(self.send_message(message))
  File "base_coder.py", line 1131, in send_message
    except retry_exceptions() as err:
           ^^^^^^^^^^^^^^^^^^
  File "sendchat.py", line 32, in retry_exceptions
    litellm.llms.anthropic.chat.AnthropicError,
    ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
AttributeError: module 'litellm.llms.anthropic.chat' has no attribute 'AnthropicError'
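For reference, the final AttributeError is what actually aborts the run: aider's retry_exceptions() in sendchat.py builds its tuple of retryable exception classes by reaching for litellm.llms.anthropic.chat.AnthropicError, which the installed litellm build does not expose, so the except clause at base_coder.py line 1131 blows up instead of handling the original 413 tokens_limit_reached response. Below is a minimal sketch of a defensive lookup, assuming only the module paths and class names that appear in the traceback above; it is illustrative, not aider's actual code or fix.

```python
import importlib


def optional_exception(module_path: str, class_name: str):
    """Return the named exception class if the installed litellm exposes it, else None."""
    try:
        module = importlib.import_module(module_path)
    except ImportError:
        return None
    return getattr(module, class_name, None)


# Build the tuple of retryable exceptions from whatever this litellm version
# actually provides, so a renamed or relocated class degrades gracefully
# instead of raising an AttributeError that masks the real API error.
RETRYABLE_EXCEPTIONS = tuple(
    exc
    for exc in (
        optional_exception("litellm.llms.anthropic.chat", "AnthropicError"),
        optional_exception("litellm.llms.OpenAI.openai", "OpenAIError"),
    )
    if exc is not None
)
```

Either way, the underlying problem the traceback reports is the 413 from the model endpoint (the request body exceeded the 8000-token limit for gpt-4o); the AttributeError only prevents aider from surfacing that error cleanly.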