You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
2024-06-24 16:42:40,847 uvicorn.error 24624 ERROR Exception in ASGI application
Traceback (most recent call last):
File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\sse_starlette\sse.py", line 269, in call
await wrap(partial(self.listen_for_disconnect, receive))
File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\sse_starlette\sse.py", line 258, in wrap
await func()
File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\sse_starlette\sse.py", line 215, in listen_for_disconnect
message = await receive()
^^^^^^^^^^^^^^^
File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\uvicorn\protocols\http\h11_impl.py", line 524, in receive
await self.message_event.wait()
File "E:\Anaconda\envs\langchainchatchat3.0\Lib\asyncio\locks.py", line 213, in wait
await fut
asyncio.exceptions.CancelledError: Cancelled by cancel scope 20e18628510
During handling of the above exception, another exception occurred:
Exception Group Traceback (most recent call last):
| File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\uvicorn\protocols\http\h11_impl.py", line 396, in run_asgi
| result = await app( # type: ignore[func-returns-value]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
| File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\uvicorn\middleware\proxy_headers.py", line 70, in call
| return await self.app(scope, receive, send)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
| File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\fastapi\applications.py", line 1054, in call
| await super().call(scope, receive, send)
| File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\starlette\applications.py", line 123, in call
| await self.middleware_stack(scope, receive, send)
| File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\starlette\middleware\errors.py", line 186, in call
| raise exc
| File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\starlette\middleware\errors.py", line 164, in call
| await self.app(scope, receive, _send)
| File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\starlette\middleware\cors.py", line 83, in call
| await self.app(scope, receive, send)
| File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\starlette\middleware\exceptions.py", line 62, in call
| await wrap_app_handling_exceptions(self.app, conn)(scope, receive, send)
| File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\starlette_exception_handler.py", line 64, in wrapped_app
| raise exc
| File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\starlette_exception_handler.py", line 53, in wrapped_app
| await app(scope, receive, sender)
| File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\starlette\routing.py", line 758, in call
| await self.middleware_stack(scope, receive, send)
| File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\starlette\routing.py", line 778, in app
| await route.handle(scope, receive, send)
| File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\starlette\routing.py", line 299, in handle
| await self.app(scope, receive, send)
| File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\starlette\routing.py", line 79, in app
| await wrap_app_handling_exceptions(app, request)(scope, receive, send)
| File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\starlette_exception_handler.py", line 64, in wrapped_app
| raise exc
| File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\starlette_exception_handler.py", line 53, in wrapped_app
| await app(scope, receive, sender)
| File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\starlette\routing.py", line 77, in app
| await response(scope, receive, send)
| File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\sse_starlette\sse.py", line 255, in call
| async with anyio.create_task_group() as task_group:
| File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\anyio_backends_asyncio.py", line 680, in aexit | raise BaseExceptionGroup(
| ExceptionGroup: unhandled errors in a TaskGroup (1 sub-exception)
+-+---------------- 1 ----------------
| Traceback (most recent call last):
| File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\model_providers\core\model_runtime\model_providers__base\large_language_model.py", line 450, in _invoke_result_generator
| for chunk in result:
| File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\model_providers\core\model_runtime\model_providers\deepseek\llm\llm.py", line 819, in _handle_chat_generate_stream_response
| prompt_tokens = self._num_tokens_from_messages(
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
| File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\model_providers\core\model_runtime\model_providers\deepseek\llm\llm.py", line 1028, in _num_tokens_from_messages
| raise NotImplementedError(
| NotImplementedError: get_num_tokens_from_messages() is not presently implemented for model cl100k_base.See https://github.com/openai/openai-python/blob/main/chatml.md for information on how messages are converted to tokens.
|
| During handling of the above exception, another exception occurred:
|
| Traceback (most recent call last):
| File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\sse_starlette\sse.py", line 258, in wrap
| await func()
| File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\sse_starlette\sse.py", line 245, in stream_response
| async for data in self.body_iterator:
| File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\model_providers\bootstrap_web\message_convert\core.py", line 211, in _stream_openai_chat_completion
| for chunk in response:
| File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\model_providers\core\model_runtime\model_providers__base\large_language_model.py", line 474, in _invoke_result_generator
| raise self._transform_invoke_error(e)
| model_providers.core.model_runtime.errors.invoke.InvokeError: [deepseek] Error: get_num_tokens_from_messages() is not presently implemented for model cl100k_base.See https://github.com/openai/openai-python/blob/main/chatml.md for information on how messages are converted to tokens.
The text was updated successfully, but these errors were encountered:
imClumsyPanda
changed the title
[BUG] 简洁阐述问题 / Concise description of the issue: NotImplementedError: get_num_tokens_from_messages() is not presently implemented for model cl100k_base
[BUG] NotImplementedError: get_num_tokens_from_messages() is not presently implemented for model cl100k_base
Jun 27, 2024
2024-06-24 16:42:40,847 uvicorn.error 24624 ERROR Exception in ASGI application
Traceback (most recent call last):
File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\sse_starlette\sse.py", line 269, in call
await wrap(partial(self.listen_for_disconnect, receive))
File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\sse_starlette\sse.py", line 258, in wrap
await func()
File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\sse_starlette\sse.py", line 215, in listen_for_disconnect
message = await receive()
^^^^^^^^^^^^^^^
File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\uvicorn\protocols\http\h11_impl.py", line 524, in receive
await self.message_event.wait()
File "E:\Anaconda\envs\langchainchatchat3.0\Lib\asyncio\locks.py", line 213, in wait
await fut
asyncio.exceptions.CancelledError: Cancelled by cancel scope 20e18628510
During handling of the above exception, another exception occurred:
| File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\uvicorn\protocols\http\h11_impl.py", line 396, in run_asgi
| result = await app( # type: ignore[func-returns-value]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
| File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\uvicorn\middleware\proxy_headers.py", line 70, in call
| return await self.app(scope, receive, send)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
| File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\fastapi\applications.py", line 1054, in call
| await super().call(scope, receive, send)
| File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\starlette\applications.py", line 123, in call
| await self.middleware_stack(scope, receive, send)
| File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\starlette\middleware\errors.py", line 186, in call
| raise exc
| File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\starlette\middleware\errors.py", line 164, in call
| await self.app(scope, receive, _send)
| File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\starlette\middleware\cors.py", line 83, in call
| await self.app(scope, receive, send)
| File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\starlette\middleware\exceptions.py", line 62, in call
| await wrap_app_handling_exceptions(self.app, conn)(scope, receive, send)
| File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\starlette_exception_handler.py", line 64, in wrapped_app
| raise exc
| File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\starlette_exception_handler.py", line 53, in wrapped_app
| await app(scope, receive, sender)
| File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\starlette\routing.py", line 758, in call
| await self.middleware_stack(scope, receive, send)
| File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\starlette\routing.py", line 778, in app
| await route.handle(scope, receive, send)
| File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\starlette\routing.py", line 299, in handle
| await self.app(scope, receive, send)
| File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\starlette\routing.py", line 79, in app
| await wrap_app_handling_exceptions(app, request)(scope, receive, send)
| File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\starlette_exception_handler.py", line 64, in wrapped_app
| raise exc
| File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\starlette_exception_handler.py", line 53, in wrapped_app
| await app(scope, receive, sender)
| File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\starlette\routing.py", line 77, in app
| await response(scope, receive, send)
| File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\sse_starlette\sse.py", line 255, in call
| async with anyio.create_task_group() as task_group:
| File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\anyio_backends_asyncio.py", line 680, in aexit | raise BaseExceptionGroup(
| ExceptionGroup: unhandled errors in a TaskGroup (1 sub-exception)
+-+---------------- 1 ----------------
| Traceback (most recent call last):
| File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\model_providers\core\model_runtime\model_providers__base\large_language_model.py", line 450, in _invoke_result_generator
| for chunk in result:
| File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\model_providers\core\model_runtime\model_providers\deepseek\llm\llm.py", line 819, in _handle_chat_generate_stream_response
| prompt_tokens = self._num_tokens_from_messages(
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
| File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\model_providers\core\model_runtime\model_providers\deepseek\llm\llm.py", line 1028, in _num_tokens_from_messages
| raise NotImplementedError(
| NotImplementedError: get_num_tokens_from_messages() is not presently implemented for model cl100k_base.See https://github.com/openai/openai-python/blob/main/chatml.md for information on how messages are converted to tokens.
|
| During handling of the above exception, another exception occurred:
|
| Traceback (most recent call last):
| File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\sse_starlette\sse.py", line 258, in wrap
| await func()
| File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\sse_starlette\sse.py", line 245, in stream_response
| async for data in self.body_iterator:
| File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\model_providers\bootstrap_web\message_convert\core.py", line 211, in _stream_openai_chat_completion
| for chunk in response:
| File "E:\Anaconda\envs\langchainchatchat3.0\Lib\site-packages\model_providers\core\model_runtime\model_providers__base\large_language_model.py", line 474, in _invoke_result_generator
| raise self._transform_invoke_error(e)
| model_providers.core.model_runtime.errors.invoke.InvokeError: [deepseek] Error: get_num_tokens_from_messages() is not presently implemented for model cl100k_base.See https://github.com/openai/openai-python/blob/main/chatml.md for information on how messages are converted to tokens.
The text was updated successfully, but these errors were encountered: