When I run `python3 main.py` directly, the application works and vanna.ai can communicate with Ollama. However, when I package the same application as a Docker container and start it with `docker compose up`, I get the error below:
```
vanna.ai | Traceback (most recent call last):
vanna.ai | File "/usr/local/lib/python3.12/site-packages/httpx/_transports/default.py", line 69, in map_httpcore_exceptions
vanna.ai | yield
vanna.ai | File "/usr/local/lib/python3.12/site-packages/httpx/_transports/default.py", line 233, in handle_request
vanna.ai | resp = self._pool.handle_request(req)
vanna.ai | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
vanna.ai | File "/usr/local/lib/python3.12/site-packages/httpcore/_sync/connection_pool.py", line 216, in handle_request
vanna.ai | raise exc from None
vanna.ai | File "/usr/local/lib/python3.12/site-packages/httpcore/_sync/connection_pool.py", line 196, in handle_request
vanna.ai | response = connection.handle_request(
vanna.ai | ^^^^^^^^^^^^^^^^^^^^^^^^^^
vanna.ai | File "/usr/local/lib/python3.12/site-packages/httpcore/_sync/connection.py", line 99, in handle_request
vanna.ai | raise exc
vanna.ai | File "/usr/local/lib/python3.12/site-packages/httpcore/_sync/connection.py", line 76, in handle_request
vanna.ai | stream = self._connect(request)
vanna.ai | ^^^^^^^^^^^^^^^^^^^^^^
vanna.ai | File "/usr/local/lib/python3.12/site-packages/httpcore/_sync/connection.py", line 122, in _connect
vanna.ai | stream = self._network_backend.connect_tcp(**kwargs)
vanna.ai | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
vanna.ai | File "/usr/local/lib/python3.12/site-packages/httpcore/_backends/sync.py", line 205, in connect_tcp
vanna.ai | with map_exceptions(exc_map):
vanna.ai | File "/usr/local/lib/python3.12/contextlib.py", line 158, in exit
vanna.ai | self.gen.throw(value)
vanna.ai | File "/usr/local/lib/python3.12/site-packages/httpcore/_exceptions.py", line 14, in map_exceptions
vanna.ai | raise to_exc(exc) from exc
vanna.ai | httpcore.ConnectError: [Errno 111] Connection refused
vanna.ai |
vanna.ai | The above exception was the direct cause of the following exception:
vanna.ai |
vanna.ai | Traceback (most recent call last):
vanna.ai | File "/app/main.py", line 15, in
vanna.ai | vn = MyVanna(config={'model': 'mistral', 'host': 'ollama', 'port': 11434})
vanna.ai | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
vanna.ai | File "/app/main.py", line 13, in init
vanna.ai | Ollama.__init__(self, config=config)
vanna.ai | File "/usr/local/lib/python3.12/site-packages/vanna/ollama/ollama.py", line 34, in init
vanna.ai | self.__pull_model_if_ne(self.ollama_client, self.model)
vanna.ai | File "/usr/local/lib/python3.12/site-packages/vanna/ollama/ollama.py", line 38, in __pull_model_if_ne
vanna.ai | model_response = ollama_client.list()
vanna.ai | ^^^^^^^^^^^^^^^^^^^^
vanna.ai | File "/usr/local/lib/python3.12/site-packages/ollama/_client.py", line 465, in list
vanna.ai | return self._request('GET', '/api/tags').json()
vanna.ai | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
vanna.ai | File "/usr/local/lib/python3.12/site-packages/ollama/_client.py", line 70, in _request
vanna.ai | response = self._client.request(method, url, **kwargs)
vanna.ai | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
vanna.ai | File "/usr/local/lib/python3.12/site-packages/httpx/_client.py", line 827, in request
vanna.ai | return self.send(request, auth=auth, follow_redirects=follow_redirects)
vanna.ai | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
vanna.ai | File "/usr/local/lib/python3.12/site-packages/httpx/_client.py", line 914, in send
vanna.ai | response = self._send_handling_auth(
vanna.ai | ^^^^^^^^^^^^^^^^^^^^^^^^^
vanna.ai | File "/usr/local/lib/python3.12/site-packages/httpx/_client.py", line 942, in _send_handling_auth
vanna.ai | response = self._send_handling_redirects(
vanna.ai | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
vanna.ai | File "/usr/local/lib/python3.12/site-packages/httpx/_client.py", line 979, in _send_handling_redirects
vanna.ai | response = self._send_single_request(request)
vanna.ai | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
vanna.ai | File "/usr/local/lib/python3.12/site-packages/httpx/_client.py", line 1015, in _send_single_request
vanna.ai | response = transport.handle_request(request)
vanna.ai | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
vanna.ai | File "/usr/local/lib/python3.12/site-packages/httpx/_transports/default.py", line 232, in handle_request
vanna.ai | with map_httpcore_exceptions():
vanna.ai | File "/usr/local/lib/python3.12/contextlib.py", line 158, in exit
vanna.ai | self.gen.throw(value)
vanna.ai | File "/usr/local/lib/python3.12/site-packages/httpx/_transports/default.py", line 86, in map_httpcore_exceptions
vanna.ai | raise mapped_exc(message) from exc
vanna.ai | httpx.ConnectError: [Errno 111] Connection refused
```
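The final line (`httpx.ConnectError: [Errno 111] Connection refused`) means the TCP connection to the Ollama endpoint was refused outright: nothing was listening at the address the client resolved from inside the container. A quick way to narrow this down is a small diagnostic probe run inside the vanna.ai container. This is only a sketch, and the `ollama` hostname is an assumption taken from the service name in the docker-compose.yaml further down.

```python
# probe.py -- diagnostic sketch, run from inside the vanna.ai container.
# Hits /api/tags, the same endpoint the failing ollama_client.list() call uses above.
import httpx

for base in ("http://localhost:11434", "http://ollama:11434"):
    try:
        r = httpx.get(f"{base}/api/tags", timeout=5)
        print(f"{base} -> HTTP {r.status_code}")
    except httpx.ConnectError as exc:
        print(f"{base} -> connection refused ({exc})")
```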
This is my main.py file:
```python
from vanna.remote import VannaDefault
from vanna.flask import VannaFlaskApp
from config.database_config import DatabaseConfig
from vanna.ollama import Ollama
from vanna.chromadb import ChromaDB_VectorStore

dbConfig = DatabaseConfig()

class MyVanna(ChromaDB_VectorStore, Ollama):
    def __init__(self, config=None):
        ChromaDB_VectorStore.__init__(self, config=config)
        Ollama.__init__(self, config=config)

vn = MyVanna(config={'model': 'mistral'})
vn.connect_to_postgres(host=dbConfig.host, dbname=dbConfig.dbname, user=dbConfig.user, password=dbConfig.password, port=dbConfig.port)

app = VannaFlaskApp(
    vn,
    allow_llm_to_see_data=True,
    show_training_data=True,
    sql=True,
    table=True,
    chart=True,
    summarization=False,
    ask_results_correct=True,
)

app.run()
```
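Note that the traceback above was produced by a constructor call that differs from the file as posted: the trace shows `MyVanna(config={'model': 'mistral', 'host': 'ollama', 'port': 11434})`, while the file passes only `{'model': 'mistral'}`, which leaves the Ollama client on its default local endpoint (the ollama Python client falls back to 127.0.0.1:11434 when no host is given). Inside the vanna.ai container, localhost is that container itself, not the ollama service, so the endpoint has to name the compose service. A minimal sketch follows; the exact config keys depend on the vanna version installed in the image, and the `'host'`/`'port'` pair is simply copied from the traceback.

```python
# Sketch only: point vanna's Ollama client at the compose service instead of localhost.
# The 'host'/'port' keys mirror the call shown in the traceback above; if the installed
# vanna version expects a single URL-style key instead, adjust accordingly (assumption).
vn = MyVanna(config={'model': 'mistral', 'host': 'ollama', 'port': 11434})
```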
This is my Dockerfile:
```dockerfile
FROM python:3.12.4
WORKDIR /app
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
COPY . .
EXPOSE 8084
CMD ["python3", "main.py"]
```
This is my docker-compose.yaml file:
```yaml
version: '3.8'

services:
  ollama:
    image: ollama/ollama:latest
    ports:
      - 11434:11434
    volumes:
      - .:/code
      - ./ollama/ollama:/root/.ollama
    container_name: ollama
    pull_policy: always
    tty: true
    restart: always
    environment:
      - OLLAMA_KEEP_ALIVE=24h
      - OLLAMA_HOST=0.0.0.0
    networks:
      - ollama-docker

  vanna.ai:
    image: pedroreckel/vanna_ai_analyst:latest
    container_name: vanna.ai
    depends_on:
      - ollama
    ports:
      - "8084:8084"
    restart: always
    networks:
      - ollama-docker

networks:
  ollama-docker:
    external: false
```
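One more detail worth noting about this compose file: `depends_on` only orders container startup, it does not wait until the Ollama server inside the container is actually accepting connections on 11434, so main.py can run before the API is listening. A small wait loop before constructing `MyVanna` would rule out that race. This is a sketch under the assumption that the app should target `http://ollama:11434` (the compose service name); the helper name is hypothetical.

```python
# Sketch: block until the Ollama API answers before the app constructs MyVanna.
# Assumes the endpoint http://ollama:11434 (service name from docker-compose.yaml).
import time
import httpx

def wait_for_ollama(base_url="http://ollama:11434", timeout_s=120):
    deadline = time.monotonic() + timeout_s
    while time.monotonic() < deadline:
        try:
            httpx.get(f"{base_url}/api/tags", timeout=5)
            return True
        except httpx.HTTPError:
            time.sleep(2)
    return False
```

With a guard like this, the app simply waits if it comes up before the Ollama server is ready instead of crashing immediately.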
To Reproduce
Steps to reproduce the behavior:
1. Build the application image from the Dockerfile above (published as pedroreckel/vanna_ai_analyst:latest).
2. Run `docker compose up` with the docker-compose.yaml above.
3. Watch the vanna.ai service logs; the traceback shown at the top of this issue appears as soon as main.py constructs MyVanna.
Expected behavior
The vanna.ai container should start up without any problems, just like the ollama container does.