Description
Hello, I’m trying to setup Weaviate and Ollama, with both running in separate Docker containers, Weaviate is setup as default, and Ollama is can be accessed via port 11435. However when I try to ingest a document using text2vec-ollama I receive a connection refused error.
docker-compose.yaml
version: "3.9"
services:
ollama:
image: gb/ollama
ports:
- 11435:11434
- 11434:11434
restart: unless-stopped
deploy:
resources:
reservations:
devices:
- driver: nvidia
capabilities: ["gpu"]
count: all
volumes:
- "ollama-data:/root/.ollama"
weaviate:
image: gb/weaviate
ports:
- 8080:8080
- 50051:50051
restart: unless-stopped
volumes:
- weaviate-data:/var/lib/weaviate
volumes:
ollama-data:
weaviate-data:
weaviate/Dockerfile
FROM cr.weaviate.io/semitechnologies/weaviate:1.26.1
ENV ENABLE_MODULES="text2vec-ollama"
ENV DEFAULT_VECTORIZER_MODULE="text2vec-ollama"
EXPOSE 8080
CMD [ "--host", "0.0.0.0", "--port", "8080", "--scheme", "http"]
golang program
I am running my client in Go, see below for relevant sections in addition to the full program.
func (store *VectorStore) createSchema() error {
saporoSchema := &models.Class{
Class: "SaporoData2",
Vectorizer: "text2vec-ollama",
}
return store.Client.Schema().ClassCreator().WithClass(saporoSchema).Do(context.Background())
}
func (store *VectorStore) populate() error {
data, err := http.DefaultClient.Get("https://raw.githubusercontent.com/weaviate-tutorials/quickstart/main/data/jeopardy_tiny.json")
if err != nil {
return err
}
defer data.Body.Close()
// Decode the data
var items []map[string]string
if err := json.NewDecoder(data.Body).Decode(&items); err != nil {
panic(err)
}
// convert items into a slice of models.Object
objects := make([]*models.Object, len(items))
for i := range items {
objects[i] = &models.Object{
Class: "SaporoData2",
Properties: map[string]any{
"category": items[i]["Category"],
"question": items[i]["Question"],
"answer": items[i]["Answer"],
},
}
}
// batch write items
batchRes, err := store.Client.Batch().ObjectsBatcher().WithObjects(objects...).Do(context.Background())
if err != nil {
return err
}
for _, res := range batchRes {
if res.Result.Errors != nil {
return fmt.Errorf("batch write failed, %+v", res.Result.Errors.Error[0])
}
}
return nil
}
Full Program
package main
import (
"context"
"encoding/json"
"fmt"
"net/http"
"github.com/sirupsen/logrus"
"github.com/weaviate/weaviate-go-client/v4/weaviate"
"github.com/weaviate/weaviate-go-client/v4/weaviate/graphql"
"github.com/weaviate/weaviate/entities/models"
)
type (
VectorStore struct {
Client *weaviate.Client
Host string
Scheme string
}
)
func (store *VectorStore) connect() error {
cfg := weaviate.Config{
Host: store.Host,
Scheme: "http",
Headers: nil,
}
client, err := weaviate.NewClient(cfg)
if err != nil {
return err
}
// Check the connection
live, err := client.Misc().LiveChecker().Do(context.Background())
if err != nil {
return err
}
if live {
store.Client = client
}
return nil
}
func makeVectorStore(host string) (*VectorStore, error) {
store := VectorStore{
Host: host,
}
if err := store.connect(); err != nil {
logrus.Errorf("unable to connect to vector store, err='%s'", err)
return nil, err
}
return &store, nil
}
func (store *VectorStore) createSchema() error {
saporoSchema := &models.Class{
Class: "SaporoData2",
Vectorizer: "text2vec-ollama",
}
return store.Client.Schema().ClassCreator().WithClass(saporoSchema).Do(context.Background())
}
func (store *VectorStore) populate() error {
data, err := http.DefaultClient.Get("https://raw.githubusercontent.com/weaviate-tutorials/quickstart/main/data/jeopardy_tiny.json")
if err != nil {
return err
}
defer data.Body.Close()
// Decode the data
var items []map[string]string
if err := json.NewDecoder(data.Body).Decode(&items); err != nil {
panic(err)
}
// convert items into a slice of models.Object
objects := make([]*models.Object, len(items))
for i := range items {
objects[i] = &models.Object{
Class: "SaporoData2",
Properties: map[string]any{
"category": items[i]["Category"],
"question": items[i]["Question"],
"answer": items[i]["Answer"],
},
}
}
// batch write items
batchRes, err := store.Client.Batch().ObjectsBatcher().WithObjects(objects...).Do(context.Background())
if err != nil {
return err
}
for _, res := range batchRes {
if res.Result.Errors != nil {
return fmt.Errorf("batch write failed, %+v", res.Result.Errors.Error[0])
}
}
return nil
}
func (store *VectorStore) search(query string) error {
fields := []graphql.Field{
{Name: "question"},
{Name: "answer"},
{Name: "category"},
}
nearText := store.Client.GraphQL().
NearTextArgBuilder().
WithConcepts([]string{query})
result, err := store.Client.GraphQL().Get().
WithClassName("SaporoData2").
WithFields(fields...).
WithNearText(nearText).
WithLimit(2).
Do(context.Background())
if err != nil {
return err
}
fmt.Println("Result Size = ", len(result.Data))
for k, r := range result.Data {
fmt.Println(k, r)
}
return nil
}
func main() {
store, err := makeVectorStore("localhost:8080")
if err != nil {
logrus.Errorf("unable to create store, err='%s'", err)
}
if err := store.createSchema(); err != nil {
logrus.Errorf("unable to create SaporoData schema, err='%s'", err)
}
if err := store.populate(); err != nil {
logrus.Errorf("unable to populate database, err='%s'", err)
}
store.search("biology")
}
error from weaviate
send POST request: Post "http://localhost:11434/api/embeddings": dial tcp [::1]:11434: connect: connection refused
Question
Curious if anyone has any ideas of how I can resolve this? I see that you can specify the endpoint explicitly in the python API but I don’t see that being an option in the go library. Running Ollama locally is not an option as our entire system runs in Docker.
Server Setup Information
- Weaviate Server Version: 1.26.1
- Deployment Method:
- Multi Node? Number of Running Nodes: 1
- Client Language and Version: Golang 1.22
- Multitenancy?: No