2023-06-25 11:27:36 - WARNING! engine is not default parameter. engine was transferred to model_kwargs. Please confirm that engine is what you intended. 2023-06-25 11:27:36 - Your app is available at http://localhost:8000 2023-06-25 23:40:05 - error_code=context_length_exceeded error_message="This model's maximum context length is 8192 tokens. However, your messages resulted in 8218 tokens. Please reduce the length of the messages." error_param=messages error_type=invalid_request_error message='OpenAI API error received' stream_error=False 2023-06-25 23:40:05 - This model's maximum context length is 8192 tokens. However, your messages resulted in 8218 tokens. Please reduce the length of the messages. Traceback (most recent call last): File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/chainlit/__init__.py", line 61, in wrapper return await user_function(**params_values) File "azure_demo.py", line 24, in main res = await cl.make_async(sync_func)() File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/asyncer/_main.py", line 358, in wrapper return await anyio.to_thread.run_sync( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/anyio/to_thread.py", line 33, in run_sync return await get_asynclib().run_sync_in_worker_thread( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/anyio/_backends/_asyncio.py", line 877, in run_sync_in_worker_thread return await future File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/futures.py", line 284, in __await__ yield self # This tells Task to wait for completion. File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/tasks.py", line 328, in __wakeup future.result() File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/futures.py", line 201, in result raise self._exception File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/anyio/_backends/_asyncio.py", line 807, in run result = context.run(func, *args) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/chainlit/sync.py", line 20, in wrapper res = function(*args, **kwargs) File "azure_demo.py", line 35, in sync_func return chat(history) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/langchain/chat_models/base.py", line 208, in __call__ generation = self.generate( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/langchain/chat_models/base.py", line 102, in generate raise e File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/langchain/chat_models/base.py", line 94, in generate results = [ File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/langchain/chat_models/base.py", line 95, in self._generate(m, stop=stop, run_manager=run_manager, **kwargs) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/langchain/chat_models/openai.py", line 359, in _generate response = self.completion_with_retry(messages=message_dicts, **params) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/langchain/chat_models/openai.py", line 307, in completion_with_retry return _completion_with_retry(**kwargs) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/tenacity/__init__.py", line 289, in wrapped_f return self(f, *args, **kw) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/tenacity/__init__.py", line 379, in __call__ do = self.iter(retry_state=retry_state) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/tenacity/__init__.py", line 314, in iter return fut.result() File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/concurrent/futures/_base.py", line 439, in result return self.__get_result() File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/concurrent/futures/_base.py", line 391, in __get_result raise self._exception File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/tenacity/__init__.py", line 382, in __call__ result = fn(*args, **kwargs) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/langchain/chat_models/openai.py", line 305, in _completion_with_retry return self.client.create(**kwargs) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_resources/chat_completion.py", line 25, in create return super().create(*args, **kwargs) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_resources/abstract/engine_api_resource.py", line 153, in create response, _, api_key = requestor.request( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_requestor.py", line 230, in request resp, got_stream = self._interpret_response(result, stream) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_requestor.py", line 624, in _interpret_response self._interpret_response_line( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_requestor.py", line 687, in _interpret_response_line raise self.handle_error_response( openai.error.InvalidRequestError: This model's maximum context length is 8192 tokens. However, your messages resulted in 8218 tokens. Please reduce the length of the messages. 2023-06-25 23:40:29 - error_code=context_length_exceeded error_message="This model's maximum context length is 8192 tokens. However, your messages resulted in 8239 tokens. Please reduce the length of the messages." error_param=messages error_type=invalid_request_error message='OpenAI API error received' stream_error=False 2023-06-25 23:40:29 - This model's maximum context length is 8192 tokens. However, your messages resulted in 8239 tokens. Please reduce the length of the messages. Traceback (most recent call last): File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/chainlit/__init__.py", line 61, in wrapper return await user_function(**params_values) File "azure_demo.py", line 24, in main res = await cl.make_async(sync_func)() File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/asyncer/_main.py", line 358, in wrapper return await anyio.to_thread.run_sync( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/anyio/to_thread.py", line 33, in run_sync return await get_asynclib().run_sync_in_worker_thread( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/anyio/_backends/_asyncio.py", line 877, in run_sync_in_worker_thread return await future File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/futures.py", line 284, in __await__ yield self # This tells Task to wait for completion. File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/tasks.py", line 328, in __wakeup future.result() File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/futures.py", line 201, in result raise self._exception File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/anyio/_backends/_asyncio.py", line 807, in run result = context.run(func, *args) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/chainlit/sync.py", line 20, in wrapper res = function(*args, **kwargs) File "azure_demo.py", line 35, in sync_func return chat(history) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/langchain/chat_models/base.py", line 208, in __call__ generation = self.generate( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/langchain/chat_models/base.py", line 102, in generate raise e File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/langchain/chat_models/base.py", line 94, in generate results = [ File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/langchain/chat_models/base.py", line 95, in self._generate(m, stop=stop, run_manager=run_manager, **kwargs) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/langchain/chat_models/openai.py", line 359, in _generate response = self.completion_with_retry(messages=message_dicts, **params) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/langchain/chat_models/openai.py", line 307, in completion_with_retry return _completion_with_retry(**kwargs) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/tenacity/__init__.py", line 289, in wrapped_f return self(f, *args, **kw) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/tenacity/__init__.py", line 379, in __call__ do = self.iter(retry_state=retry_state) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/tenacity/__init__.py", line 314, in iter return fut.result() File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/concurrent/futures/_base.py", line 439, in result return self.__get_result() File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/concurrent/futures/_base.py", line 391, in __get_result raise self._exception File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/tenacity/__init__.py", line 382, in __call__ result = fn(*args, **kwargs) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/langchain/chat_models/openai.py", line 305, in _completion_with_retry return self.client.create(**kwargs) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_resources/chat_completion.py", line 25, in create return super().create(*args, **kwargs) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_resources/abstract/engine_api_resource.py", line 153, in create response, _, api_key = requestor.request( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_requestor.py", line 230, in request resp, got_stream = self._interpret_response(result, stream) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_requestor.py", line 624, in _interpret_response self._interpret_response_line( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_requestor.py", line 687, in _interpret_response_line raise self.handle_error_response( openai.error.InvalidRequestError: This model's maximum context length is 8192 tokens. However, your messages resulted in 8239 tokens. Please reduce the length of the messages. 2023-06-25 23:41:24 - error_code=context_length_exceeded error_message="This model's maximum context length is 8192 tokens. However, your messages resulted in 8260 tokens. Please reduce the length of the messages." error_param=messages error_type=invalid_request_error message='OpenAI API error received' stream_error=False 2023-06-25 23:41:24 - This model's maximum context length is 8192 tokens. However, your messages resulted in 8260 tokens. Please reduce the length of the messages. Traceback (most recent call last): File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/chainlit/__init__.py", line 61, in wrapper return await user_function(**params_values) File "azure_demo.py", line 24, in main res = await cl.make_async(sync_func)() File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/asyncer/_main.py", line 358, in wrapper return await anyio.to_thread.run_sync( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/anyio/to_thread.py", line 33, in run_sync return await get_asynclib().run_sync_in_worker_thread( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/anyio/_backends/_asyncio.py", line 877, in run_sync_in_worker_thread return await future File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/futures.py", line 284, in __await__ yield self # This tells Task to wait for completion. File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/tasks.py", line 328, in __wakeup future.result() File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/futures.py", line 201, in result raise self._exception File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/anyio/_backends/_asyncio.py", line 807, in run result = context.run(func, *args) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/chainlit/sync.py", line 20, in wrapper res = function(*args, **kwargs) File "azure_demo.py", line 35, in sync_func return chat(history) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/langchain/chat_models/base.py", line 208, in __call__ generation = self.generate( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/langchain/chat_models/base.py", line 102, in generate raise e File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/langchain/chat_models/base.py", line 94, in generate results = [ File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/langchain/chat_models/base.py", line 95, in self._generate(m, stop=stop, run_manager=run_manager, **kwargs) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/langchain/chat_models/openai.py", line 359, in _generate response = self.completion_with_retry(messages=message_dicts, **params) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/langchain/chat_models/openai.py", line 307, in completion_with_retry return _completion_with_retry(**kwargs) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/tenacity/__init__.py", line 289, in wrapped_f return self(f, *args, **kw) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/tenacity/__init__.py", line 379, in __call__ do = self.iter(retry_state=retry_state) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/tenacity/__init__.py", line 314, in iter return fut.result() File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/concurrent/futures/_base.py", line 439, in result return self.__get_result() File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/concurrent/futures/_base.py", line 391, in __get_result raise self._exception File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/tenacity/__init__.py", line 382, in __call__ result = fn(*args, **kwargs) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/langchain/chat_models/openai.py", line 305, in _completion_with_retry return self.client.create(**kwargs) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_resources/chat_completion.py", line 25, in create return super().create(*args, **kwargs) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_resources/abstract/engine_api_resource.py", line 153, in create response, _, api_key = requestor.request( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_requestor.py", line 230, in request resp, got_stream = self._interpret_response(result, stream) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_requestor.py", line 624, in _interpret_response self._interpret_response_line( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_requestor.py", line 687, in _interpret_response_line raise self.handle_error_response( openai.error.InvalidRequestError: This model's maximum context length is 8192 tokens. However, your messages resulted in 8260 tokens. Please reduce the length of the messages. 2023-06-25 23:41:36 - error_code=context_length_exceeded error_message="This model's maximum context length is 8192 tokens. However, your messages resulted in 8281 tokens. Please reduce the length of the messages." error_param=messages error_type=invalid_request_error message='OpenAI API error received' stream_error=False 2023-06-25 23:41:36 - This model's maximum context length is 8192 tokens. However, your messages resulted in 8281 tokens. Please reduce the length of the messages. Traceback (most recent call last): File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/chainlit/__init__.py", line 61, in wrapper return await user_function(**params_values) File "azure_demo.py", line 24, in main res = await cl.make_async(sync_func)() File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/asyncer/_main.py", line 358, in wrapper return await anyio.to_thread.run_sync( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/anyio/to_thread.py", line 33, in run_sync return await get_asynclib().run_sync_in_worker_thread( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/anyio/_backends/_asyncio.py", line 877, in run_sync_in_worker_thread return await future File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/futures.py", line 284, in __await__ yield self # This tells Task to wait for completion. File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/tasks.py", line 328, in __wakeup future.result() File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/futures.py", line 201, in result raise self._exception File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/anyio/_backends/_asyncio.py", line 807, in run result = context.run(func, *args) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/chainlit/sync.py", line 20, in wrapper res = function(*args, **kwargs) File "azure_demo.py", line 35, in sync_func return chat(history) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/langchain/chat_models/base.py", line 208, in __call__ generation = self.generate( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/langchain/chat_models/base.py", line 102, in generate raise e File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/langchain/chat_models/base.py", line 94, in generate results = [ File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/langchain/chat_models/base.py", line 95, in self._generate(m, stop=stop, run_manager=run_manager, **kwargs) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/langchain/chat_models/openai.py", line 359, in _generate response = self.completion_with_retry(messages=message_dicts, **params) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/langchain/chat_models/openai.py", line 307, in completion_with_retry return _completion_with_retry(**kwargs) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/tenacity/__init__.py", line 289, in wrapped_f return self(f, *args, **kw) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/tenacity/__init__.py", line 379, in __call__ do = self.iter(retry_state=retry_state) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/tenacity/__init__.py", line 314, in iter return fut.result() File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/concurrent/futures/_base.py", line 439, in result return self.__get_result() File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/concurrent/futures/_base.py", line 391, in __get_result raise self._exception File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/tenacity/__init__.py", line 382, in __call__ result = fn(*args, **kwargs) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/langchain/chat_models/openai.py", line 305, in _completion_with_retry return self.client.create(**kwargs) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_resources/chat_completion.py", line 25, in create return super().create(*args, **kwargs) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_resources/abstract/engine_api_resource.py", line 153, in create response, _, api_key = requestor.request( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_requestor.py", line 230, in request resp, got_stream = self._interpret_response(result, stream) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_requestor.py", line 624, in _interpret_response self._interpret_response_line( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_requestor.py", line 687, in _interpret_response_line raise self.handle_error_response( openai.error.InvalidRequestError: This model's maximum context length is 8192 tokens. However, your messages resulted in 8281 tokens. Please reduce the length of the messages. 2023-06-25 23:42:06 - WARNING! engine is not default parameter. engine was transferred to model_kwargs. Please confirm that engine is what you intended. 2023-06-25 23:42:06 - Your app is available at http://localhost:8000 2023-06-26 01:38:25 - WARNING! engine is not default parameter. engine was transferred to model_kwargs. Please confirm that engine is what you intended. 2023-06-26 01:38:25 - Your app is available at http://localhost:8000 2023-06-26 09:06:39 - Your app is available at http://localhost:8000 2023-06-26 09:06:50 - Error communicating with OpenAI Traceback (most recent call last): File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/connector.py", line 980, in _wrap_create_connection return await self._loop.create_connection(*args, **kwargs) # type: ignore[return-value] # noqa File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/base_events.py", line 1065, in create_connection raise exceptions[0] File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/base_events.py", line 1050, in create_connection sock = await self._connect_sock( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/base_events.py", line 961, in _connect_sock await self.sock_connect(sock, address) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/selector_events.py", line 500, in sock_connect return await fut File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/futures.py", line 284, in __await__ yield self # This tells Task to wait for completion. File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/tasks.py", line 328, in __wakeup future.result() File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/futures.py", line 201, in result raise self._exception File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/selector_events.py", line 535, in _sock_connect_cb raise OSError(err, f'Connect call failed {address}') ConnectionRefusedError: [Errno 61] Connect call failed ('127.0.0.1', 7890) The above exception was the direct cause of the following exception: Traceback (most recent call last): File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_requestor.py", line 592, in arequest_raw result = await session.request(**request_kwargs) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/client.py", line 536, in _request conn = await self._connector.connect( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/connector.py", line 540, in connect proto = await self._create_connection(req, traces, timeout) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/connector.py", line 899, in _create_connection _, proto = await self._create_proxy_connection(req, traces, timeout) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/connector.py", line 1231, in _create_proxy_connection transport, proto = await self._create_direct_connection( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/connector.py", line 1206, in _create_direct_connection raise last_exc File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/connector.py", line 1175, in _create_direct_connection transp, proto = await self._wrap_create_connection( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/connector.py", line 988, in _wrap_create_connection raise client_error(req.connection_key, exc) from exc aiohttp.client_exceptions.ClientProxyConnectionError: Cannot connect to host 127.0.0.1:7890 ssl:default [Connect call failed ('127.0.0.1', 7890)] The above exception was the direct cause of the following exception: Traceback (most recent call last): File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/chainlit/__init__.py", line 61, in wrapper return await user_function(**params_values) File "test_app.py", line 30, in main async for stream_resp in await openai.ChatCompletion.acreate( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_resources/chat_completion.py", line 45, in acreate return await super().acreate(*args, **kwargs) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_resources/abstract/engine_api_resource.py", line 217, in acreate response, _, api_key = await requestor.arequest( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_requestor.py", line 304, in arequest result = await self.arequest_raw( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_requestor.py", line 609, in arequest_raw raise error.APIConnectionError("Error communicating with OpenAI") from e openai.error.APIConnectionError: Error communicating with OpenAI 2023-06-26 09:06:58 - message='OpenAI API response' path=https://api.openai.com/v1/chat/completions processing_ms=6 request_id=ed0f85bc92201d4be6817a9038d0d453 response_code=200 2023-06-26 09:07:08 - message='OpenAI API response' path=https://api.openai.com/v1/chat/completions processing_ms=7 request_id=305b3129efb17749b4021ae459ddfbec response_code=200 2023-06-26 09:11:18 - message='OpenAI API response' path=https://api.openai.com/v1/chat/completions processing_ms=160 request_id=4e06d2573d96a2ace8e33e06fb7a6ad5 response_code=200 2023-06-26 09:11:41 - message='OpenAI API response' path=https://api.openai.com/v1/chat/completions processing_ms=7 request_id=53be981fc047bc54670becacc8aafb66 response_code=200 2023-06-26 09:12:17 - Response payload is not completed Traceback (most recent call last): File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/chainlit/__init__.py", line 61, in wrapper return await user_function(**params_values) File "test_app.py", line 30, in main async for stream_resp in await openai.ChatCompletion.acreate( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_resources/abstract/engine_api_resource.py", line 230, in return ( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_requestor.py", line 323, in wrap_resp async for r in resp: File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_requestor.py", line 638, in return ( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_requestor.py", line 118, in parse_stream_async async for line in rbody: File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/streams.py", line 35, in __anext__ rv = await self.read_func() File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/streams.py", line 311, in readline return await self.readuntil() File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/streams.py", line 343, in readuntil await self._wait("readuntil") File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/streams.py", line 304, in _wait await waiter File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/futures.py", line 284, in __await__ yield self # This tells Task to wait for completion. File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/tasks.py", line 328, in __wakeup future.result() File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/futures.py", line 201, in result raise self._exception aiohttp.client_exceptions.ClientPayloadError: Response payload is not completed 2023-06-26 09:20:25 - message='OpenAI API response' path=https://api.openai.com/v1/chat/completions processing_ms=9 request_id=acbb601443a86003d583f15f1cb56099 response_code=200 2023-06-26 09:46:34 - message='OpenAI API response' path=https://api.openai.com/v1/chat/completions processing_ms=14 request_id=16096cc72157fede13d3b3835ee81d08 response_code=200 2023-06-26 09:46:43 - message='OpenAI API response' path=https://api.openai.com/v1/chat/completions processing_ms=7 request_id=a54078e74002c79d84eea46ca26eebe9 response_code=200 2023-06-26 09:47:33 - Response payload is not completed Traceback (most recent call last): File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/chainlit/__init__.py", line 61, in wrapper return await user_function(**params_values) File "test_app.py", line 30, in main async for stream_resp in await openai.ChatCompletion.acreate( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_resources/abstract/engine_api_resource.py", line 230, in return ( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_requestor.py", line 323, in wrap_resp async for r in resp: File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_requestor.py", line 638, in return ( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_requestor.py", line 118, in parse_stream_async async for line in rbody: File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/streams.py", line 35, in __anext__ rv = await self.read_func() File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/streams.py", line 311, in readline return await self.readuntil() File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/streams.py", line 343, in readuntil await self._wait("readuntil") File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/streams.py", line 304, in _wait await waiter File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/futures.py", line 284, in __await__ yield self # This tells Task to wait for completion. File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/tasks.py", line 328, in __wakeup future.result() File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/futures.py", line 201, in result raise self._exception aiohttp.client_exceptions.ClientPayloadError: Response payload is not completed 2023-06-26 09:50:45 - message='OpenAI API response' path=https://api.openai.com/v1/chat/completions processing_ms=10 request_id=0c3ac53c343c8fed6875e9a4c9240536 response_code=200 2023-06-26 09:56:08 - message='OpenAI API response' path=https://api.openai.com/v1/chat/completions processing_ms=15 request_id=f860b824d4084619f2b49ad0d1bee1f2 response_code=200 2023-06-26 09:58:52 - message='OpenAI API response' path=https://api.openai.com/v1/chat/completions processing_ms=19 request_id=08dd77b0b25ac1cf2bb72a1737abbf7b response_code=200 2023-06-26 10:01:01 - message='OpenAI API response' path=https://api.openai.com/v1/chat/completions processing_ms=18 request_id=066252cc15dd763ef7828bd51269c5b7 response_code=200 2023-06-26 10:01:27 - message='OpenAI API response' path=https://api.openai.com/v1/chat/completions processing_ms=17 request_id=21d306e0a26bc02fb47ee1971b360f02 response_code=200 2023-06-26 11:03:56 - message='OpenAI API response' path=https://api.openai.com/v1/chat/completions processing_ms=5 request_id=dc6a15f30d6ac781bd285d4cb10d40ca response_code=200 2023-06-26 11:10:10 - message='OpenAI API response' path=https://api.openai.com/v1/chat/completions processing_ms=18 request_id=f9d6788acf332feb4337646fc0fe941b response_code=200 2023-06-26 11:11:34 - message='OpenAI API response' path=https://api.openai.com/v1/chat/completions processing_ms=20 request_id=6821c077dfaf64fba51847789c2b95c2 response_code=200 2023-06-26 11:12:37 - message='OpenAI API response' path=https://api.openai.com/v1/chat/completions processing_ms=23 request_id=4d1310efe3b65d27a5f9e5f949a35010 response_code=200 2023-06-26 11:18:04 - message='OpenAI API response' path=https://api.openai.com/v1/chat/completions processing_ms=7 request_id=df132bc5bf0a2348d8dd712ea3873867 response_code=200 2023-06-26 11:19:52 - message='OpenAI API response' path=https://api.openai.com/v1/chat/completions processing_ms=9 request_id=db77c33310595d144a81191dd40e3a92 response_code=200 2023-06-26 11:25:30 - message='OpenAI API response' path=https://api.openai.com/v1/chat/completions processing_ms=5 request_id=8ac6ecc57fb5d108b42af6671fe9d035 response_code=200 2023-06-26 11:26:28 - message='OpenAI API response' path=https://api.openai.com/v1/chat/completions processing_ms=5 request_id=db67d1cd8a630f92536b03e5e59ab832 response_code=200 2023-06-26 11:27:57 - message='OpenAI API response' path=https://api.openai.com/v1/chat/completions processing_ms=7 request_id=0bd5b2d3c3f53cd87b62738c14bfd68c response_code=200 2023-06-26 11:28:50 - message='OpenAI API response' path=https://api.openai.com/v1/chat/completions processing_ms=6 request_id=d9470f29b1e1c5e17f12e676f5c15719 response_code=200 2023-06-26 11:33:44 - message='OpenAI API response' path=https://api.openai.com/v1/chat/completions processing_ms=16 request_id=f3b4dee55a87b0f2ec47e7b07d99bcf2 response_code=200 2023-06-26 11:34:32 - message='OpenAI API response' path=https://api.openai.com/v1/chat/completions processing_ms=20 request_id=2a701abd6c1bd4e24f37550b2b6400e6 response_code=200 2023-06-26 11:39:44 - message='OpenAI API response' path=https://api.openai.com/v1/chat/completions processing_ms=10 request_id=74cb39d9b306077a99a7c017333f8348 response_code=200 2023-06-26 12:51:42 - message='OpenAI API response' path=https://api.openai.com/v1/chat/completions processing_ms=5 request_id=84d8e6990587a3b37f9d251cee6c7884 response_code=200 2023-06-26 13:54:58 - Error communicating with OpenAI Traceback (most recent call last): File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/connector.py", line 980, in _wrap_create_connection return await self._loop.create_connection(*args, **kwargs) # type: ignore[return-value] # noqa File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/base_events.py", line 1065, in create_connection raise exceptions[0] File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/base_events.py", line 1050, in create_connection sock = await self._connect_sock( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/base_events.py", line 961, in _connect_sock await self.sock_connect(sock, address) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/selector_events.py", line 500, in sock_connect return await fut File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/futures.py", line 284, in __await__ yield self # This tells Task to wait for completion. File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/tasks.py", line 328, in __wakeup future.result() File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/futures.py", line 201, in result raise self._exception File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/selector_events.py", line 535, in _sock_connect_cb raise OSError(err, f'Connect call failed {address}') ConnectionRefusedError: [Errno 61] Connect call failed ('127.0.0.1', 7890) The above exception was the direct cause of the following exception: Traceback (most recent call last): File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_requestor.py", line 592, in arequest_raw result = await session.request(**request_kwargs) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/client.py", line 536, in _request conn = await self._connector.connect( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/connector.py", line 540, in connect proto = await self._create_connection(req, traces, timeout) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/connector.py", line 899, in _create_connection _, proto = await self._create_proxy_connection(req, traces, timeout) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/connector.py", line 1231, in _create_proxy_connection transport, proto = await self._create_direct_connection( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/connector.py", line 1206, in _create_direct_connection raise last_exc File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/connector.py", line 1175, in _create_direct_connection transp, proto = await self._wrap_create_connection( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/connector.py", line 988, in _wrap_create_connection raise client_error(req.connection_key, exc) from exc aiohttp.client_exceptions.ClientProxyConnectionError: Cannot connect to host 127.0.0.1:7890 ssl:default [Connect call failed ('127.0.0.1', 7890)] The above exception was the direct cause of the following exception: Traceback (most recent call last): File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/chainlit/__init__.py", line 61, in wrapper return await user_function(**params_values) File "test_app.py", line 30, in main async for stream_resp in await openai.ChatCompletion.acreate( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_resources/chat_completion.py", line 45, in acreate return await super().acreate(*args, **kwargs) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_resources/abstract/engine_api_resource.py", line 217, in acreate response, _, api_key = await requestor.arequest( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_requestor.py", line 304, in arequest result = await self.arequest_raw( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_requestor.py", line 609, in arequest_raw raise error.APIConnectionError("Error communicating with OpenAI") from e openai.error.APIConnectionError: Error communicating with OpenAI 2023-06-26 13:55:54 - Error communicating with OpenAI Traceback (most recent call last): File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/connector.py", line 980, in _wrap_create_connection return await self._loop.create_connection(*args, **kwargs) # type: ignore[return-value] # noqa File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/base_events.py", line 1065, in create_connection raise exceptions[0] File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/base_events.py", line 1050, in create_connection sock = await self._connect_sock( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/base_events.py", line 961, in _connect_sock await self.sock_connect(sock, address) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/selector_events.py", line 500, in sock_connect return await fut File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/futures.py", line 284, in __await__ yield self # This tells Task to wait for completion. File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/tasks.py", line 328, in __wakeup future.result() File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/futures.py", line 201, in result raise self._exception File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/selector_events.py", line 535, in _sock_connect_cb raise OSError(err, f'Connect call failed {address}') ConnectionRefusedError: [Errno 61] Connect call failed ('127.0.0.1', 7890) The above exception was the direct cause of the following exception: Traceback (most recent call last): File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_requestor.py", line 592, in arequest_raw result = await session.request(**request_kwargs) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/client.py", line 536, in _request conn = await self._connector.connect( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/connector.py", line 540, in connect proto = await self._create_connection(req, traces, timeout) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/connector.py", line 899, in _create_connection _, proto = await self._create_proxy_connection(req, traces, timeout) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/connector.py", line 1231, in _create_proxy_connection transport, proto = await self._create_direct_connection( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/connector.py", line 1206, in _create_direct_connection raise last_exc File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/connector.py", line 1175, in _create_direct_connection transp, proto = await self._wrap_create_connection( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/connector.py", line 988, in _wrap_create_connection raise client_error(req.connection_key, exc) from exc aiohttp.client_exceptions.ClientProxyConnectionError: Cannot connect to host 127.0.0.1:7890 ssl:default [Connect call failed ('127.0.0.1', 7890)] The above exception was the direct cause of the following exception: Traceback (most recent call last): File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/chainlit/__init__.py", line 61, in wrapper return await user_function(**params_values) File "test_app.py", line 30, in main async for stream_resp in await openai.ChatCompletion.acreate( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_resources/chat_completion.py", line 45, in acreate return await super().acreate(*args, **kwargs) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_resources/abstract/engine_api_resource.py", line 217, in acreate response, _, api_key = await requestor.arequest( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_requestor.py", line 304, in arequest result = await self.arequest_raw( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_requestor.py", line 609, in arequest_raw raise error.APIConnectionError("Error communicating with OpenAI") from e openai.error.APIConnectionError: Error communicating with OpenAI 2023-06-26 13:56:03 - Error communicating with OpenAI Traceback (most recent call last): File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/connector.py", line 980, in _wrap_create_connection return await self._loop.create_connection(*args, **kwargs) # type: ignore[return-value] # noqa File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/base_events.py", line 1065, in create_connection raise exceptions[0] File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/base_events.py", line 1050, in create_connection sock = await self._connect_sock( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/base_events.py", line 961, in _connect_sock await self.sock_connect(sock, address) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/selector_events.py", line 500, in sock_connect return await fut File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/futures.py", line 284, in __await__ yield self # This tells Task to wait for completion. File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/tasks.py", line 328, in __wakeup future.result() File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/futures.py", line 201, in result raise self._exception File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/selector_events.py", line 535, in _sock_connect_cb raise OSError(err, f'Connect call failed {address}') ConnectionRefusedError: [Errno 61] Connect call failed ('127.0.0.1', 7890) The above exception was the direct cause of the following exception: Traceback (most recent call last): File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_requestor.py", line 592, in arequest_raw result = await session.request(**request_kwargs) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/client.py", line 536, in _request conn = await self._connector.connect( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/connector.py", line 540, in connect proto = await self._create_connection(req, traces, timeout) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/connector.py", line 899, in _create_connection _, proto = await self._create_proxy_connection(req, traces, timeout) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/connector.py", line 1231, in _create_proxy_connection transport, proto = await self._create_direct_connection( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/connector.py", line 1206, in _create_direct_connection raise last_exc File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/connector.py", line 1175, in _create_direct_connection transp, proto = await self._wrap_create_connection( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/connector.py", line 988, in _wrap_create_connection raise client_error(req.connection_key, exc) from exc aiohttp.client_exceptions.ClientProxyConnectionError: Cannot connect to host 127.0.0.1:7890 ssl:default [Connect call failed ('127.0.0.1', 7890)] The above exception was the direct cause of the following exception: Traceback (most recent call last): File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/chainlit/__init__.py", line 61, in wrapper return await user_function(**params_values) File "test_app.py", line 30, in main async for stream_resp in await openai.ChatCompletion.acreate( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_resources/chat_completion.py", line 45, in acreate return await super().acreate(*args, **kwargs) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_resources/abstract/engine_api_resource.py", line 217, in acreate response, _, api_key = await requestor.arequest( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_requestor.py", line 304, in arequest result = await self.arequest_raw( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_requestor.py", line 609, in arequest_raw raise error.APIConnectionError("Error communicating with OpenAI") from e openai.error.APIConnectionError: Error communicating with OpenAI 2023-06-26 13:56:41 - Error communicating with OpenAI Traceback (most recent call last): File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/connector.py", line 980, in _wrap_create_connection return await self._loop.create_connection(*args, **kwargs) # type: ignore[return-value] # noqa File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/base_events.py", line 1065, in create_connection raise exceptions[0] File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/base_events.py", line 1050, in create_connection sock = await self._connect_sock( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/base_events.py", line 961, in _connect_sock await self.sock_connect(sock, address) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/selector_events.py", line 500, in sock_connect return await fut File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/futures.py", line 284, in __await__ yield self # This tells Task to wait for completion. File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/tasks.py", line 328, in __wakeup future.result() File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/futures.py", line 201, in result raise self._exception File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/selector_events.py", line 535, in _sock_connect_cb raise OSError(err, f'Connect call failed {address}') ConnectionRefusedError: [Errno 61] Connect call failed ('127.0.0.1', 7890) The above exception was the direct cause of the following exception: Traceback (most recent call last): File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_requestor.py", line 592, in arequest_raw result = await session.request(**request_kwargs) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/client.py", line 536, in _request conn = await self._connector.connect( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/connector.py", line 540, in connect proto = await self._create_connection(req, traces, timeout) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/connector.py", line 899, in _create_connection _, proto = await self._create_proxy_connection(req, traces, timeout) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/connector.py", line 1231, in _create_proxy_connection transport, proto = await self._create_direct_connection( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/connector.py", line 1206, in _create_direct_connection raise last_exc File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/connector.py", line 1175, in _create_direct_connection transp, proto = await self._wrap_create_connection( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/connector.py", line 988, in _wrap_create_connection raise client_error(req.connection_key, exc) from exc aiohttp.client_exceptions.ClientProxyConnectionError: Cannot connect to host 127.0.0.1:7890 ssl:default [Connect call failed ('127.0.0.1', 7890)] The above exception was the direct cause of the following exception: Traceback (most recent call last): File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/chainlit/__init__.py", line 61, in wrapper return await user_function(**params_values) File "test_app.py", line 30, in main async for stream_resp in await openai.ChatCompletion.acreate( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_resources/chat_completion.py", line 45, in acreate return await super().acreate(*args, **kwargs) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_resources/abstract/engine_api_resource.py", line 217, in acreate response, _, api_key = await requestor.arequest( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_requestor.py", line 304, in arequest result = await self.arequest_raw( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_requestor.py", line 609, in arequest_raw raise error.APIConnectionError("Error communicating with OpenAI") from e openai.error.APIConnectionError: Error communicating with OpenAI 2023-06-26 14:12:46 - Error communicating with OpenAI Traceback (most recent call last): File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/connector.py", line 980, in _wrap_create_connection return await self._loop.create_connection(*args, **kwargs) # type: ignore[return-value] # noqa File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/base_events.py", line 1065, in create_connection raise exceptions[0] File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/base_events.py", line 1050, in create_connection sock = await self._connect_sock( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/base_events.py", line 961, in _connect_sock await self.sock_connect(sock, address) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/selector_events.py", line 500, in sock_connect return await fut File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/futures.py", line 284, in __await__ yield self # This tells Task to wait for completion. File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/tasks.py", line 328, in __wakeup future.result() File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/futures.py", line 201, in result raise self._exception File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/selector_events.py", line 535, in _sock_connect_cb raise OSError(err, f'Connect call failed {address}') ConnectionRefusedError: [Errno 61] Connect call failed ('127.0.0.1', 7890) The above exception was the direct cause of the following exception: Traceback (most recent call last): File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_requestor.py", line 592, in arequest_raw result = await session.request(**request_kwargs) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/client.py", line 536, in _request conn = await self._connector.connect( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/connector.py", line 540, in connect proto = await self._create_connection(req, traces, timeout) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/connector.py", line 899, in _create_connection _, proto = await self._create_proxy_connection(req, traces, timeout) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/connector.py", line 1231, in _create_proxy_connection transport, proto = await self._create_direct_connection( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/connector.py", line 1206, in _create_direct_connection raise last_exc File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/connector.py", line 1175, in _create_direct_connection transp, proto = await self._wrap_create_connection( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/connector.py", line 988, in _wrap_create_connection raise client_error(req.connection_key, exc) from exc aiohttp.client_exceptions.ClientProxyConnectionError: Cannot connect to host 127.0.0.1:7890 ssl:default [Connect call failed ('127.0.0.1', 7890)] The above exception was the direct cause of the following exception: Traceback (most recent call last): File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/chainlit/__init__.py", line 61, in wrapper return await user_function(**params_values) File "test_app.py", line 30, in main async for stream_resp in await openai.ChatCompletion.acreate( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_resources/chat_completion.py", line 45, in acreate return await super().acreate(*args, **kwargs) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_resources/abstract/engine_api_resource.py", line 217, in acreate response, _, api_key = await requestor.arequest( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_requestor.py", line 304, in arequest result = await self.arequest_raw( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_requestor.py", line 609, in arequest_raw raise error.APIConnectionError("Error communicating with OpenAI") from e openai.error.APIConnectionError: Error communicating with OpenAI 2023-06-26 14:16:05 - Error communicating with OpenAI Traceback (most recent call last): File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/connector.py", line 980, in _wrap_create_connection return await self._loop.create_connection(*args, **kwargs) # type: ignore[return-value] # noqa File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/base_events.py", line 1065, in create_connection raise exceptions[0] File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/base_events.py", line 1050, in create_connection sock = await self._connect_sock( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/base_events.py", line 961, in _connect_sock await self.sock_connect(sock, address) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/selector_events.py", line 500, in sock_connect return await fut File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/futures.py", line 284, in __await__ yield self # This tells Task to wait for completion. File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/tasks.py", line 328, in __wakeup future.result() File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/futures.py", line 201, in result raise self._exception File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/asyncio/selector_events.py", line 535, in _sock_connect_cb raise OSError(err, f'Connect call failed {address}') ConnectionRefusedError: [Errno 61] Connect call failed ('127.0.0.1', 7890) The above exception was the direct cause of the following exception: Traceback (most recent call last): File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_requestor.py", line 592, in arequest_raw result = await session.request(**request_kwargs) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/client.py", line 536, in _request conn = await self._connector.connect( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/connector.py", line 540, in connect proto = await self._create_connection(req, traces, timeout) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/connector.py", line 899, in _create_connection _, proto = await self._create_proxy_connection(req, traces, timeout) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/connector.py", line 1231, in _create_proxy_connection transport, proto = await self._create_direct_connection( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/connector.py", line 1206, in _create_direct_connection raise last_exc File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/connector.py", line 1175, in _create_direct_connection transp, proto = await self._wrap_create_connection( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/aiohttp/connector.py", line 988, in _wrap_create_connection raise client_error(req.connection_key, exc) from exc aiohttp.client_exceptions.ClientProxyConnectionError: Cannot connect to host 127.0.0.1:7890 ssl:default [Connect call failed ('127.0.0.1', 7890)] The above exception was the direct cause of the following exception: Traceback (most recent call last): File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/chainlit/__init__.py", line 61, in wrapper return await user_function(**params_values) File "test_app.py", line 30, in main async for stream_resp in await openai.ChatCompletion.acreate( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_resources/chat_completion.py", line 45, in acreate return await super().acreate(*args, **kwargs) File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_resources/abstract/engine_api_resource.py", line 217, in acreate response, _, api_key = await requestor.arequest( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_requestor.py", line 304, in arequest result = await self.arequest_raw( File "/Users/qinyingjie/miniconda3/envs/py36tf1/lib/python3.9/site-packages/openai/api_requestor.py", line 609, in arequest_raw raise error.APIConnectionError("Error communicating with OpenAI") from e openai.error.APIConnectionError: Error communicating with OpenAI