runtime error

Exit code: 1. Reason:

  File "/usr/local/lib/python3.10/site-packages/gradio/blocks.py", line 2976, in run_extra_startup_events
    await startup_event()
  File "/usr/local/lib/python3.10/site-packages/gradio/helpers.py", line 510, in _start_caching
    await self.cache()
  File "/usr/local/lib/python3.10/site-packages/gradio/helpers.py", line 576, in cache
    prediction = await self.root_block.process_api(
  File "/usr/local/lib/python3.10/site-packages/gradio/blocks.py", line 2116, in process_api
    result = await self.call_function(
  File "/usr/local/lib/python3.10/site-packages/gradio/blocks.py", line 1621, in call_function
    prediction = await fn(*processed_input)
  File "/usr/local/lib/python3.10/site-packages/gradio/utils.py", line 882, in async_wrapper
    response = await f(*args, **kwargs)
  File "/usr/local/lib/python3.10/site-packages/gradio/chat_interface.py", line 1094, in _examples_fn
    response = await anyio.to_thread.run_sync(
  File "/usr/local/lib/python3.10/site-packages/anyio/to_thread.py", line 56, in run_sync
    return await get_async_backend().run_sync_in_worker_thread(
  File "/usr/local/lib/python3.10/site-packages/anyio/_backends/_asyncio.py", line 2485, in run_sync_in_worker_thread
    return await future
  File "/usr/local/lib/python3.10/site-packages/anyio/_backends/_asyncio.py", line 976, in run
    result = context.run(func, *args)
  File "/app/app.py", line 13, in chat
    response = ollama.chat(model='omegarab', messages=[{'role': 'user', 'content': full_prompt}])
  File "/usr/local/lib/python3.10/site-packages/ollama/_client.py", line 351, in chat
    return self._request(
  File "/usr/local/lib/python3.10/site-packages/ollama/_client.py", line 189, in _request
    return cls(**self._request_raw(*args, **kwargs).json())
  File "/usr/local/lib/python3.10/site-packages/ollama/_client.py", line 135, in _request_raw
    raise ConnectionError(CONNECTION_ERROR_MESSAGE) from None
ConnectionError: Failed to connect to Ollama. Please check that Ollama is downloaded, running and accessible. https://ollama.com/download

Container logs:

Fetching error logs...